Lines Matching refs:path

23 	struct btree_path *path;  in bch2_btree_node_lock_counts()  local
32 trans_for_each_path(trans, path, i) in bch2_btree_node_lock_counts()
33 if (path != skip && &path->l[level].b->c == b) { in bch2_btree_node_lock_counts()
34 int t = btree_node_locked_type(path, level); in bch2_btree_node_lock_counts()
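The loop above in bch2_btree_node_lock_counts() tallies, for every other path in the transaction, how that path locks the given node. A minimal, self-contained sketch of the same counting pattern; all types here (lockcount_path, the 4-level array) are simplified stand-ins, not the real bcachefs definitions:

enum lock_held { NODE_UNLOCKED, NODE_READ_LOCKED, NODE_INTENT_LOCKED };

struct lockcount_path {
	const void	*node[4];	/* node this path has at each level */
	enum lock_held	held[4];	/* how that node is locked */
};

struct lock_counts { unsigned read, intent; };

/* Tally how every path except @skip locks @node at @level. */
struct lock_counts
toy_node_lock_counts(const struct lockcount_path *paths, unsigned nr,
		     const struct lockcount_path *skip,
		     const void *node, unsigned level)
{
	struct lock_counts c = { 0, 0 };

	for (unsigned i = 0; i < nr; i++) {
		if (&paths[i] == skip || paths[i].node[level] != node)
			continue;
		if (paths[i].held[level] == NODE_READ_LOCKED)
			c.read++;
		else if (paths[i].held[level] == NODE_INTENT_LOCKED)
			c.intent++;
	}
	return c;
}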
46 struct btree_path *path, struct btree *b) in bch2_btree_node_unlock_write() argument
48 bch2_btree_node_unlock_write_inlined(trans, path, b); in bch2_btree_node_unlock_write()
312 struct btree_path *path = paths + path_idx; in bch2_check_for_deadlock() local
313 if (!path->nodes_locked) in bch2_check_for_deadlock()
325 int lock_held = btree_node_locked_type(path, top->level); in bch2_check_for_deadlock()
330 b = &READ_ONCE(path->l[top->level].b)->c; in bch2_check_for_deadlock()
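bch2_check_for_deadlock() walks other transactions' paths to build a "who waits on whom" graph and looks for a cycle that includes the current transaction. A heavily simplified sketch of that cycle check, with the graph reduced to a single waits_on edge per transaction (the real code discovers holders via READ_ONCE(path->l[...].b) and explores a full graph):

#include <stdbool.h>

/*
 * waits_on[t] is the transaction t is blocked on, or -1 if t runs
 * freely. Following edges from @self and arriving back at @self
 * means a deadlock cycle.
 */
bool toy_check_for_deadlock(const int *waits_on, int nr, int self)
{
	int hops = 0;

	for (int t = waits_on[self]; t >= 0 && hops < nr; t = waits_on[t], hops++)
		if (t == self)
			return true;
	return false;
}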
404 int __bch2_btree_node_lock_write(struct btree_trans *trans, struct btree_path *path, in __bch2_btree_node_lock_write() argument
423 mark_btree_node_locked_noreset(path, b->level, BTREE_NODE_INTENT_LOCKED); in __bch2_btree_node_lock_write()
429 struct btree_path *path, in bch2_btree_node_lock_write_nofail() argument
432 int ret = __btree_node_lock_write(trans, path, b, true); in bch2_btree_node_lock_write_nofail()
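__bch2_btree_node_lock_write() and its _nofail variant take the write side of a node the path already holds an intent lock on; the intent hold excludes other writers, so what remains is draining readers. A toy model of that two-step, with the blocking wait reduced to a flag clear (an assumption-laden stand-in, not the real six-lock implementation):

#include <stdbool.h>

struct toy_node_lock {
	bool		intent_held;	/* our path's existing intent hold */
	unsigned	readers;	/* read holds by other paths */
	bool		write_held;
};

bool toy_node_lock_write(struct toy_node_lock *l, bool nofail)
{
	if (!l->intent_held)
		return false;		/* caller must hold intent first */

	if (l->readers) {
		if (!nofail)
			return false;	/* fail, caller restarts */
		l->readers = 0;		/* stand-in for blocking until readers drain */
	}
	l->write_held = true;
	return true;
}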
439 struct btree_path *path, in btree_path_get_locks() argument
443 unsigned l = path->level; in btree_path_get_locks()
447 if (!btree_path_node(path, l)) in btree_path_get_locks()
451 ? bch2_btree_node_upgrade(trans, path, l) in btree_path_get_locks()
452 : bch2_btree_node_relock(trans, path, l))) { in btree_path_get_locks()
457 f->b = path->l[l].b; in btree_path_get_locks()
462 } while (l < path->locks_want); in btree_path_get_locks()
470 __bch2_btree_path_unlock(trans, path); in btree_path_get_locks()
471 btree_path_set_dirty(path, BTREE_ITER_NEED_TRAVERSE); in btree_path_get_locks()
474 path->l[fail_idx].b = upgrade in btree_path_get_locks()
481 if (path->uptodate == BTREE_ITER_NEED_RELOCK) in btree_path_get_locks()
482 path->uptodate = BTREE_ITER_UPTODATE; in btree_path_get_locks()
484 return path->uptodate < BTREE_ITER_NEED_RELOCK; in btree_path_get_locks()
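btree_path_get_locks() is the workhorse: starting at the path's own level it relocks (or, when upgrading, upgrades) every level up to locks_want; on the first failure it records where it failed, drops every lock the path held, and flags the path for a full re-traverse. A compact stand-alone model of that control flow, using toy types rather than the real btree_path:

#include <stdbool.h>

#define TOY_DEPTH 4

enum path_state { PATH_UPTODATE, PATH_NEED_RELOCK, PATH_NEED_TRAVERSE };

struct getlocks_path {
	unsigned	level, locks_want;
	enum path_state	uptodate;
	bool		lockable[TOY_DEPTH];	/* would relock/upgrade succeed? */
	bool		locked[TOY_DEPTH];
};

bool toy_path_get_locks(struct getlocks_path *p)
{
	unsigned l = p->level;
	int fail_idx = -1;

	do {
		if (l >= TOY_DEPTH)		/* no node at this level */
			break;
		if (!p->lockable[l]) {
			fail_idx = l;		/* remember first failure */
			break;
		}
		p->locked[l] = true;
		l++;
	} while (l < p->locks_want);

	if (fail_idx >= 0) {
		/* drop everything and force a re-traverse */
		for (unsigned i = 0; i < TOY_DEPTH; i++)
			p->locked[i] = false;
		p->uptodate = PATH_NEED_TRAVERSE;
		return false;
	}

	if (p->uptodate == PATH_NEED_RELOCK)
		p->uptodate = PATH_UPTODATE;
	return p->uptodate < PATH_NEED_RELOCK;
}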
488 struct btree_path *path, unsigned level, in __bch2_btree_node_relock() argument
491 struct btree *b = btree_path_node(path, level); in __bch2_btree_node_relock()
492 int want = __btree_lock_want(path, level); in __bch2_btree_node_relock()
497 if (six_relock_type(&b->c.lock, want, path->l[level].lock_seq) || in __bch2_btree_node_relock()
498 (btree_node_lock_seq_matches(path, b, level) && in __bch2_btree_node_relock()
500 mark_btree_node_locked(trans, path, level, want); in __bch2_btree_node_relock()
505 trace_and_count(trans->c, btree_path_relock_fail, trans, _RET_IP_, path, level); in __bch2_btree_node_relock()
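The relock in __bch2_btree_node_relock() hinges on the lock sequence number: six_relock_type() succeeds only if the node's seq still matches the one recorded in path->l[level].lock_seq, proving the node was not written while the path had it unlocked. The core idea as a toy sequence lock (a sketch, not the six-lock API):

#include <stdbool.h>

struct seq_lock {
	unsigned	seq;	/* bumped whenever the node is modified */
	bool		free;	/* could the lock be taken right now? */
};

bool toy_node_relock(struct seq_lock *l, unsigned path_seq)
{
	if (!l->free || l->seq != path_seq)
		return false;	/* node changed or contended: trace + restart */
	l->free = false;	/* retake the lock */
	return true;
}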
512 struct btree_path *path, unsigned level) in bch2_btree_node_upgrade() argument
514 struct btree *b = path->l[level].b; in bch2_btree_node_upgrade()
515 struct six_lock_count count = bch2_btree_node_lock_counts(trans, path, &b->c, level); in bch2_btree_node_upgrade()
517 if (!is_btree_node(path, level)) in bch2_btree_node_upgrade()
520 switch (btree_lock_want(path, level)) { in bch2_btree_node_upgrade()
522 BUG_ON(btree_node_locked(path, level)); in bch2_btree_node_upgrade()
525 BUG_ON(btree_node_intent_locked(path, level)); in bch2_btree_node_upgrade()
526 return bch2_btree_node_relock(trans, path, level); in bch2_btree_node_upgrade()
533 if (btree_node_intent_locked(path, level)) in bch2_btree_node_upgrade()
539 if (btree_node_locked(path, level)) { in bch2_btree_node_upgrade()
549 if (six_relock_type(&b->c.lock, SIX_LOCK_intent, path->l[level].lock_seq)) in bch2_btree_node_upgrade()
557 if (btree_node_lock_seq_matches(path, b, level) && in bch2_btree_node_upgrade()
559 btree_node_unlock(trans, path, level); in bch2_btree_node_upgrade()
563 trace_and_count(trans->c, btree_path_upgrade_fail, trans, _RET_IP_, path, level); in bch2_btree_node_upgrade()
566 mark_btree_node_locked_noreset(path, level, BTREE_NODE_INTENT_LOCKED); in bch2_btree_node_upgrade()
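bch2_btree_node_upgrade() trades a read hold for an intent hold. The trade is only safe when our own read hold is the last one, with the brief unlocked window covered by the sequence-number check; otherwise the function fails and the transaction restarts. A minimal model of that rule, with toy fields standing in for the real counts from bch2_btree_node_lock_counts():

#include <stdbool.h>

struct upgrade_lock {
	unsigned	readers;	/* read holds, including ours */
	bool		intent_held;	/* intent already held elsewhere */
	unsigned	seq;
};

bool toy_node_upgrade(struct upgrade_lock *l, unsigned path_seq)
{
	if (l->intent_held)
		return false;	/* another holder has intent: fail */

	if (l->readers == 1 && l->seq == path_seq) {
		l->readers = 0;		/* drop our read hold... */
		l->intent_held = true;	/* ...and take intent in its place */
		return true;
	}
	return false;	/* other readers present: trace + restart */
}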
576 struct btree_path *path) in bch2_btree_path_relock_intent() argument
580 for (l = path->level; in bch2_btree_path_relock_intent()
581 l < path->locks_want && btree_path_node(path, l); in bch2_btree_path_relock_intent()
583 if (!bch2_btree_node_relock(trans, path, l)) { in bch2_btree_path_relock_intent()
584 __bch2_btree_path_unlock(trans, path); in bch2_btree_path_relock_intent()
585 btree_path_set_dirty(path, BTREE_ITER_NEED_TRAVERSE); in bch2_btree_path_relock_intent()
586 trace_and_count(trans->c, trans_restart_relock_path_intent, trans, _RET_IP_, path); in bch2_btree_path_relock_intent()
595 bool bch2_btree_path_relock_norestart(struct btree_trans *trans, struct btree_path *path) in bch2_btree_path_relock_norestart() argument
599 bool ret = btree_path_get_locks(trans, path, false, &f); in bch2_btree_path_relock_norestart()
605 struct btree_path *path, unsigned long trace_ip) in __bch2_btree_path_relock() argument
607 if (!bch2_btree_path_relock_norestart(trans, path)) { in __bch2_btree_path_relock()
608 trace_and_count(trans->c, trans_restart_relock_path, trans, trace_ip, path); in __bch2_btree_path_relock()
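bch2_btree_path_relock_norestart() and __bch2_btree_path_relock() show the recurring norestart/restart split: the _norestart variant just reports success or failure, while the wrapper converts a failure into a traced transaction-restart error. The shape of that wrapper, with a stand-in errno value rather than bcachefs's real restart errcode:

#include <errno.h>
#include <stdbool.h>

bool toy_relock_norestart(unsigned locks_held, unsigned locks_want)
{
	return locks_held >= locks_want;	/* report only, no side effects */
}

/* The restarting wrapper: trace the failure, then return an error the
 * transaction machinery unwinds on and retries. */
int toy_relock(unsigned locks_held, unsigned locks_want)
{
	if (!toy_relock_norestart(locks_held, locks_want))
		return -EINTR;	/* stand-in for a btree_trans restart code */
	return 0;
}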
616 struct btree_path *path, in bch2_btree_path_upgrade_noupgrade_sibs() argument
620 EBUG_ON(path->locks_want >= new_locks_want); in bch2_btree_path_upgrade_noupgrade_sibs()
622 path->locks_want = new_locks_want; in bch2_btree_path_upgrade_noupgrade_sibs()
624 bool ret = btree_path_get_locks(trans, path, true, f); in bch2_btree_path_upgrade_noupgrade_sibs()
630 struct btree_path *path, in __bch2_btree_path_upgrade() argument
634 bool ret = bch2_btree_path_upgrade_noupgrade_sibs(trans, path, new_locks_want, f); in __bch2_btree_path_upgrade()
657 if (!path->cached && !trans->in_traverse_all) { in __bch2_btree_path_upgrade()
662 if (linked != path && in __bch2_btree_path_upgrade()
663 linked->cached == path->cached && in __bch2_btree_path_upgrade()
664 linked->btree_id == path->btree_id && in __bch2_btree_path_upgrade()
676 struct btree_path *path, in __bch2_btree_path_downgrade() argument
679 unsigned l, old_locks_want = path->locks_want; in __bch2_btree_path_downgrade()
684 EBUG_ON(path->locks_want < new_locks_want); in __bch2_btree_path_downgrade()
686 path->locks_want = new_locks_want; in __bch2_btree_path_downgrade()
688 while (path->nodes_locked && in __bch2_btree_path_downgrade()
689 (l = btree_path_highest_level_locked(path)) >= path->locks_want) { in __bch2_btree_path_downgrade()
690 if (l > path->level) { in __bch2_btree_path_downgrade()
691 btree_node_unlock(trans, path, l); in __bch2_btree_path_downgrade()
693 if (btree_node_intent_locked(path, l)) { in __bch2_btree_path_downgrade()
694 six_lock_downgrade(&path->l[l].b->c.lock); in __bch2_btree_path_downgrade()
695 mark_btree_node_locked_noreset(path, l, BTREE_NODE_READ_LOCKED); in __bch2_btree_path_downgrade()
701 bch2_btree_path_verify_locks(path); in __bch2_btree_path_downgrade()
703 trace_path_downgrade(trans, _RET_IP_, path, old_locks_want); in __bch2_btree_path_downgrade()
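__bch2_btree_path_downgrade() sheds excess locks from the top down: any lock above the path's working level is dropped outright, while at the working level an intent hold is merely weakened to a read hold via six_lock_downgrade(). A self-contained sketch of that top-down sweep (toy state, four levels assumed):

#define TOY_DEPTH 4

enum held { HELD_NONE, HELD_READ, HELD_INTENT };

struct downgrade_path {
	unsigned	level, locks_want;
	enum held	held[TOY_DEPTH];
};

void toy_path_downgrade(struct downgrade_path *p, unsigned new_locks_want)
{
	p->locks_want = new_locks_want;

	for (unsigned l = TOY_DEPTH; l-- > p->locks_want; ) {
		if (p->held[l] == HELD_NONE)
			continue;
		if (l > p->level) {
			p->held[l] = HELD_NONE;		/* unlock entirely */
		} else {
			if (p->held[l] == HELD_INTENT)
				p->held[l] = HELD_READ;	/* six_lock_downgrade() */
			break;	/* keep locks below the working level */
		}
	}
}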
710 struct btree_path *path; in bch2_trans_downgrade() local
716 trans_for_each_path(trans, path, i) in bch2_trans_downgrade()
717 if (path->ref) in bch2_trans_downgrade()
718 bch2_btree_path_downgrade(trans, path); in bch2_trans_downgrade()
723 struct btree_path *path; in __bch2_trans_unlock() local
726 trans_for_each_path(trans, path, i) in __bch2_trans_unlock()
727 __bch2_btree_path_unlock(trans, path); in __bch2_trans_unlock()
730 static noinline __cold int bch2_trans_relock_fail(struct btree_trans *trans, struct btree_path *path, in bch2_trans_relock_fail() argument
739 bch2_bpos_to_text(&buf, path->pos); in bch2_trans_relock_fail()
740 prt_printf(&buf, " l=%u seq=%u node seq=", f->l, path->l[f->l].lock_seq); in bch2_trans_relock_fail()
774 struct btree_path *path; in __bch2_trans_relock() local
777 trans_for_each_path(trans, path, i) { in __bch2_trans_relock()
780 if (path->should_be_locked && in __bch2_trans_relock()
781 !btree_path_get_locks(trans, path, false, &f)) in __bch2_trans_relock()
782 return bch2_trans_relock_fail(trans, path, &f, trace); in __bch2_trans_relock()
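__bch2_trans_relock() applies the relock to the whole transaction: every path still marked should_be_locked must successfully relock via btree_path_get_locks(), and a single failure fails the lot through bch2_trans_relock_fail(). Distilled, with toy per-path flags:

#include <stdbool.h>

struct relock_path {
	bool	should_be_locked;	/* caller still depends on this path */
	bool	relockable;		/* would btree_path_get_locks() succeed? */
};

bool toy_trans_relock(const struct relock_path *paths, unsigned nr)
{
	for (unsigned i = 0; i < nr; i++)
		if (paths[i].should_be_locked && !paths[i].relockable)
			return false;	/* reported via the _relock_fail path */
	return true;
}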
835 void bch2_btree_path_verify_locks(struct btree_path *path) in bch2_btree_path_verify_locks() argument
842 BUG_ON(path->uptodate == BTREE_ITER_UPTODATE && in bch2_btree_path_verify_locks()
843 btree_path_node(path, path->level) && in bch2_btree_path_verify_locks()
844 !path->nodes_locked); in bch2_btree_path_verify_locks()
846 if (!path->nodes_locked) in bch2_btree_path_verify_locks()
850 int want = btree_lock_want(path, l); in bch2_btree_path_verify_locks()
851 int have = btree_node_locked_type(path, l); in bch2_btree_path_verify_locks()
853 BUG_ON(!is_btree_node(path, l) && have != BTREE_NODE_UNLOCKED); in bch2_btree_path_verify_locks()
855 BUG_ON(is_btree_node(path, l) && in bch2_btree_path_verify_locks()
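bch2_btree_path_verify_locks() spells out the invariants the rest of this file maintains: an up-to-date path positioned on a node must hold locks, a level without a node must be unlocked, and a level with one must be locked exactly as strongly as btree_lock_want() says. Roughly, per level (assertion sketch over toy state, not the real helpers):

#include <assert.h>

#define TOY_DEPTH 4

enum lock_type { LOCK_NONE, LOCK_READ, LOCK_INTENT };

struct verify_path {
	int		is_node[TOY_DEPTH];	/* real node at this level? */
	enum lock_type	want[TOY_DEPTH];	/* btree_lock_want() result */
	enum lock_type	have[TOY_DEPTH];	/* btree_node_locked_type() */
};

void toy_path_verify_locks(const struct verify_path *p)
{
	for (unsigned l = 0; l < TOY_DEPTH; l++) {
		/* no node => nothing may be locked at this level */
		assert(p->is_node[l] || p->have[l] == LOCK_NONE);
		/* node present => held type must match the wanted type */
		assert(!p->is_node[l] || p->have[l] == p->want[l]);
	}
}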
864 struct btree_path *path; in bch2_trans_locked() local
867 trans_for_each_path(trans, path, i) in bch2_trans_locked()
868 if (path->nodes_locked) in bch2_trans_locked()
880 struct btree_path *path; in bch2_trans_verify_locks() local
883 trans_for_each_path(trans, path, i) in bch2_trans_verify_locks()
884 bch2_btree_path_verify_locks(path); in bch2_trans_verify_locks()