Lines matching refs:locked_ref in __btrfs_run_delayed_refs() (fs/btrfs/extent-tree.c). A condensed sketch of how locked_ref flows through the loop follows the listing.
2526 struct btrfs_delayed_ref_head *locked_ref = NULL; in __btrfs_run_delayed_refs() local
2536 if (!locked_ref) { in __btrfs_run_delayed_refs()
2541 locked_ref = btrfs_select_ref_head(trans); in __btrfs_run_delayed_refs()
2542 if (!locked_ref) { in __btrfs_run_delayed_refs()
2549 ret = btrfs_delayed_ref_lock(trans, locked_ref); in __btrfs_run_delayed_refs()
2558 locked_ref = NULL; in __btrfs_run_delayed_refs()
2576 spin_lock(&locked_ref->lock); in __btrfs_run_delayed_refs()
2577 btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref); in __btrfs_run_delayed_refs()
2579 ref = select_delayed_ref(locked_ref); in __btrfs_run_delayed_refs()
2583 spin_unlock(&locked_ref->lock); in __btrfs_run_delayed_refs()
2584 unselect_delayed_ref_head(delayed_refs, locked_ref); in __btrfs_run_delayed_refs()
2585 locked_ref = NULL; in __btrfs_run_delayed_refs()
2596 ret = cleanup_ref_head(trans, locked_ref); in __btrfs_run_delayed_refs()
2604 locked_ref = NULL; in __btrfs_run_delayed_refs()
2611 rb_erase(&ref->ref_node, &locked_ref->ref_tree); in __btrfs_run_delayed_refs()
2622 locked_ref->ref_mod -= ref->ref_mod; in __btrfs_run_delayed_refs()
2625 locked_ref->ref_mod += ref->ref_mod; in __btrfs_run_delayed_refs()
2636 must_insert_reserved = locked_ref->must_insert_reserved; in __btrfs_run_delayed_refs()
2637 locked_ref->must_insert_reserved = 0; in __btrfs_run_delayed_refs()
2639 extent_op = locked_ref->extent_op; in __btrfs_run_delayed_refs()
2640 locked_ref->extent_op = NULL; in __btrfs_run_delayed_refs()
2641 spin_unlock(&locked_ref->lock); in __btrfs_run_delayed_refs()
2648 unselect_delayed_ref_head(delayed_refs, locked_ref); in __btrfs_run_delayed_refs()
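
Reconstructed from the matches above, the sketch below paraphrases the locked_ref lifecycle in the main loop of __btrfs_run_delayed_refs(), as it looked in the kernels where these line numbers land (roughly the v4.18-v4.19 timeframe). Comments cite the matched line numbers; the few calls that do not appear in the listing (btrfs_check_delayed_seq(), run_one_delayed_ref(), btrfs_put_delayed_ref(), btrfs_free_delayed_extent_op()) are filled in from the surrounding source of that era. Error paths, counters, and cond_resched() points are trimmed, so treat this as a reading aid rather than the verbatim function.

	/*
	 * Condensed paraphrase of the loop body; not the verbatim kernel code.
	 */
	struct btrfs_delayed_ref_head *locked_ref = NULL;	/* 2526 */
	struct btrfs_delayed_ref_node *ref;
	struct btrfs_delayed_extent_op *extent_op;
	int must_insert_reserved, ret;

	while (1) {
		if (!locked_ref) {				/* 2536 */
			/* Pick the next head off the delayed-ref rbtree. */
			spin_lock(&delayed_refs->lock);
			locked_ref = btrfs_select_ref_head(trans);	/* 2541 */
			if (!locked_ref) {			/* 2542: nothing left */
				spin_unlock(&delayed_refs->lock);
				break;
			}
			/* Take the head's mutex; -EAGAIN means someone freed it. */
			ret = btrfs_delayed_ref_lock(trans, locked_ref); /* 2549 */
			spin_unlock(&delayed_refs->lock);
			if (ret == -EAGAIN) {
				locked_ref = NULL;		/* 2558 */
				continue;
			}
		}

		/* Merge matching add/drop refs, then pick one to run. */
		spin_lock(&locked_ref->lock);			/* 2576 */
		btrfs_merge_delayed_refs(trans, delayed_refs, locked_ref);
		ref = select_delayed_ref(locked_ref);		/* 2579 */

		if (ref && ref->seq &&
		    btrfs_check_delayed_seq(fs_info, ref->seq)) {
			/* Blocked by a tree-mod seq; hand the head back. */
			spin_unlock(&locked_ref->lock);		/* 2583 */
			unselect_delayed_ref_head(delayed_refs, locked_ref);
			locked_ref = NULL;			/* 2585 */
			continue;
		}

		if (!ref) {
			/* No refs left under this head: finish it off. */
			ret = cleanup_ref_head(trans, locked_ref);	/* 2596 */
			if (ret > 0)
				continue;	/* lock was dropped; retry */
			else if (ret)
				return ret;
			locked_ref = NULL;			/* 2604 */
			continue;
		}

		/* Detach the ref and fold its count into the head. */
		rb_erase(&ref->ref_node, &locked_ref->ref_tree);	/* 2611 */
		RB_CLEAR_NODE(&ref->ref_node);
		if (ref->action == BTRFS_ADD_DELAYED_REF ||
		    ref->action == BTRFS_ADD_DELAYED_EXTENT)
			locked_ref->ref_mod -= ref->ref_mod;	/* 2622 */
		else
			locked_ref->ref_mod += ref->ref_mod;	/* 2625 */

		/* Snapshot per-head state before dropping the spinlock. */
		must_insert_reserved = locked_ref->must_insert_reserved; /* 2636 */
		locked_ref->must_insert_reserved = 0;
		extent_op = locked_ref->extent_op;		/* 2639 */
		locked_ref->extent_op = NULL;
		spin_unlock(&locked_ref->lock);			/* 2641 */

		ret = run_one_delayed_ref(trans, ref, extent_op,
					  must_insert_reserved);
		btrfs_free_delayed_extent_op(extent_op);
		if (ret) {
			/* Failed: give the head back so it can be retried. */
			unselect_delayed_ref_head(delayed_refs, locked_ref); /* 2648 */
			locked_ref = NULL;
			btrfs_put_delayed_ref(ref);
			return ret;
		}
		btrfs_put_delayed_ref(ref);
	}

The pattern worth noting is the two-level locking: delayed_refs->lock is held only long enough to select and lock a head (2536-2558), after which the per-head locked_ref->lock protects that head's ref_tree and ref_mod bookkeeping, and unselect_delayed_ref_head() is the common way to hand a head back whenever processing cannot continue (2584, 2648).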