Lines Matching refs:ref

(Each entry below gives the line number in the source file, the matching line of code, and the enclosing function; entries tagged "argument" or "local" mark where "ref" is defined as a parameter or local variable rather than merely used.)

184 				    struct btrfs_delayed_ref_node *ref)  in drop_delayed_ref()  argument
186 if (btrfs_delayed_ref_is_head(ref)) { in drop_delayed_ref()
187 head = btrfs_delayed_node_to_head(ref); in drop_delayed_ref()
191 list_del(&ref->list); in drop_delayed_ref()
193 ref->in_tree = 0; in drop_delayed_ref()
194 btrfs_put_delayed_ref(ref); in drop_delayed_ref()
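
These hits appear to come from the btrfs delayed-ref code in the Linux kernel (fs/btrfs/delayed-ref.c, from the period when a head's references were kept on a per-head list). The drop_delayed_ref() fragments show the basic teardown: unlink the node from the list, clear in_tree, and drop one reference count. A minimal user-space sketch of that unlink-and-release pattern, using hypothetical simplified types rather than the kernel structs:

#include <stdlib.h>

/* Hypothetical stand-ins for the kernel structures. */
struct ref_node {
        struct ref_node *prev, *next;   /* stands in for the list_head "list" */
        int refs;                       /* stands in for the atomic refcount  */
        int in_tree;
};

/* Unlink a node, mark it dead, and drop one reference, as the
 * list_del() / in_tree = 0 / btrfs_put_delayed_ref() lines above do. */
static void drop_ref(struct ref_node *ref)
{
        ref->prev->next = ref->next;    /* list_del(&ref->list) */
        ref->next->prev = ref->prev;
        ref->in_tree = 0;
        if (--ref->refs == 0)
                free(ref);
}

The btrfs_delayed_ref_is_head() branch at lines 186-187 handles the head node separately; the sketch covers only an ordinary list entry.
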
203 struct btrfs_delayed_ref_node *ref, in merge_ref() argument
217 if (next == ref) in merge_ref()
223 if (next->type != ref->type) in merge_ref()
226 if ((ref->type == BTRFS_TREE_BLOCK_REF_KEY || in merge_ref()
227 ref->type == BTRFS_SHARED_BLOCK_REF_KEY) && in merge_ref()
228 comp_tree_refs(btrfs_delayed_node_to_tree_ref(ref), in merge_ref()
230 ref->type)) in merge_ref()
232 if ((ref->type == BTRFS_EXTENT_DATA_REF_KEY || in merge_ref()
233 ref->type == BTRFS_SHARED_DATA_REF_KEY) && in merge_ref()
234 comp_data_refs(btrfs_delayed_node_to_data_ref(ref), in merge_ref()
238 if (ref->action == next->action) { in merge_ref()
241 if (ref->ref_mod < next->ref_mod) { in merge_ref()
242 swap(ref, next); in merge_ref()
249 ref->ref_mod += mod; in merge_ref()
250 if (ref->ref_mod == 0) { in merge_ref()
251 drop_delayed_ref(trans, delayed_refs, head, ref); in merge_ref()
257 WARN_ON(ref->type == BTRFS_TREE_BLOCK_REF_KEY || in merge_ref()
258 ref->type == BTRFS_SHARED_BLOCK_REF_KEY); in merge_ref()
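
merge_ref() is the heart of this listing: it scans the head's list for another entry of the same type that compares equal under comp_tree_refs()/comp_data_refs(), then folds the two together. Matching actions sum their ref_mods; opposite actions cancel, and the fragments show a swap(ref, next) so the node with the larger ref_mod survives and keeps its action; a node whose ref_mod reaches zero is dropped via drop_delayed_ref(). The trailing WARN_ON reflects that a tree-block ref only ever carries a ref_mod of 1, so an add and a drop must cancel completely. A small sketch of just the ref_mod arithmetic, with hypothetical types (swap and key comparison omitted):

#include <stdbool.h>

enum ref_action { REF_ADD = 1, REF_DROP = 2 };  /* hypothetical action codes */

/* Hypothetical, simplified node: only the fields the merge needs. */
struct dref {
        enum ref_action action;
        int ref_mod;            /* how many references this entry adds/drops */
};

/*
 * Fold "next" into "ref": same action accumulates, opposite actions cancel.
 * Returns true when the surviving entry nets out to zero and can be dropped.
 */
static bool merge_mods(struct dref *ref, const struct dref *next)
{
        int mod = (ref->action == next->action) ? next->ref_mod
                                                : -next->ref_mod;

        ref->ref_mod += mod;
        return ref->ref_mod == 0;
}

Every pair merged here is one less delayed ref that has to be run against the extent tree later.
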
272 struct btrfs_delayed_ref_node *ref; in btrfs_merge_delayed_refs() local
294 ref = list_first_entry(&head->ref_list, struct btrfs_delayed_ref_node, in btrfs_merge_delayed_refs()
296 while (&ref->list != &head->ref_list) { in btrfs_merge_delayed_refs()
297 if (seq && ref->seq >= seq) in btrfs_merge_delayed_refs()
300 if (merge_ref(trans, delayed_refs, head, ref, seq)) { in btrfs_merge_delayed_refs()
303 ref = list_first_entry(&head->ref_list, in btrfs_merge_delayed_refs()
309 ref = list_next_entry(ref, list); in btrfs_merge_delayed_refs()
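
btrfs_merge_delayed_refs() drives merge_ref() over the whole list: entries whose seq is at or past the given seq are skipped, and whenever a merge rewrites the list the walk restarts from the front, because the node the cursor was on may just have been freed. A user-space sketch of that skip-or-restart loop, assuming a NULL-terminated list and a caller-supplied merge callback (both hypothetical):

#include <stdbool.h>

struct node {
        struct node *next;
        unsigned long long seq;
};

/*
 * Walk head->next .. NULL.  Entries at or beyond 'seq' are left alone;
 * when try_merge() reports that it changed the list, restart from the
 * front (mirroring the list_first_entry() call after a successful
 * merge_ref() above); otherwise advance to the next entry.
 */
static void merge_all(struct node *head, unsigned long long seq,
                      bool (*try_merge)(struct node *head, struct node *n))
{
        struct node *n = head->next;

        while (n) {
                if (seq && n->seq >= seq) {
                        n = n->next;            /* still seq-protected: skip */
                        continue;
                }
                if (try_merge(head, n)) {
                        n = head->next;         /* list changed: restart */
                        continue;
                }
                n = n->next;
        }
}
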
395 struct btrfs_delayed_ref_node *ref) in add_delayed_ref_tail_merge() argument
408 if (exist->type != ref->type || exist->seq != ref->seq) in add_delayed_ref_tail_merge()
414 btrfs_delayed_node_to_tree_ref(ref), in add_delayed_ref_tail_merge()
415 ref->type)) in add_delayed_ref_tail_merge()
420 btrfs_delayed_node_to_data_ref(ref))) in add_delayed_ref_tail_merge()
425 if (exist->action == ref->action) { in add_delayed_ref_tail_merge()
426 mod = ref->ref_mod; in add_delayed_ref_tail_merge()
429 if (exist->ref_mod < ref->ref_mod) { in add_delayed_ref_tail_merge()
430 exist->action = ref->action; in add_delayed_ref_tail_merge()
432 exist->ref_mod = ref->ref_mod; in add_delayed_ref_tail_merge()
434 mod = -ref->ref_mod; in add_delayed_ref_tail_merge()
445 list_add_tail(&ref->list, &href->ref_list); in add_delayed_ref_tail_merge()
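
add_delayed_ref_tail_merge() applies the same idea at insertion time: before appending a new ref it looks at the current tail ("exist") and, if type, seq, and the type-specific key all match, folds the new entry into it instead of growing the list. A sketch of that decision, with hypothetical simplified types and the key comparison left out:

#include <stdbool.h>

struct dref {
        int type;
        unsigned long long seq;
        int action;
        int ref_mod;
};

/*
 * Try to absorb 'ref' into the existing tail entry.  Returns true when
 * 'ref' was merged and must not be appended; false means the caller
 * should list_add_tail() it, as the last fragment above does.
 */
static bool tail_merge(struct dref *exist, struct dref *ref)
{
        int mod;

        if (!exist || exist->type != ref->type || exist->seq != ref->seq)
                return false;                   /* cannot merge: append */

        if (exist->action == ref->action) {
                mod = ref->ref_mod;             /* same direction: accumulate */
        } else if (exist->ref_mod < ref->ref_mod) {
                exist->action = ref->action;    /* new ref dominates: flip action */
                mod = -exist->ref_mod;
                exist->ref_mod = ref->ref_mod;
        } else {
                mod = -ref->ref_mod;            /* old ref dominates: partial cancel */
        }
        exist->ref_mod += mod;
        /* the caller drops 'exist' from the list if its ref_mod is now zero */
        return true;
}
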
462 struct btrfs_delayed_ref_head *ref; in update_existing_head_ref() local
466 ref = btrfs_delayed_node_to_head(update); in update_existing_head_ref()
467 BUG_ON(existing_ref->is_data != ref->is_data); in update_existing_head_ref()
470 if (ref->must_insert_reserved) { in update_existing_head_ref()
478 existing_ref->must_insert_reserved = ref->must_insert_reserved; in update_existing_head_ref()
488 if (ref->extent_op) { in update_existing_head_ref()
490 existing_ref->extent_op = ref->extent_op; in update_existing_head_ref()
492 if (ref->extent_op->update_key) { in update_existing_head_ref()
494 &ref->extent_op->key, in update_existing_head_ref()
495 sizeof(ref->extent_op->key)); in update_existing_head_ref()
498 if (ref->extent_op->update_flags) { in update_existing_head_ref()
500 ref->extent_op->flags_to_set; in update_existing_head_ref()
503 btrfs_free_delayed_extent_op(ref->extent_op); in update_existing_head_ref()
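
update_existing_head_ref() runs when a head for the same extent is already queued: the new head's must_insert_reserved flag is carried over, and its extent_op (a deferred key/flag update) is folded into the existing one, adopted outright if the existing head has none, otherwise the key is copied and the flags are ORed in, after which the redundant op is freed. A user-space sketch of that extent_op merge, assuming simplified hypothetical types:

#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for a deferred key/flags update. */
struct extent_op {
        int update_key;
        int update_flags;
        unsigned long long flags_to_set;
        unsigned char key[17];          /* stand-in for a btrfs_disk_key */
};

struct head_ref {
        struct extent_op *extent_op;
};

/* Fold 'op' into 'existing', then release it, as the extent_op
 * fragments above suggest. */
static void merge_extent_op(struct head_ref *existing, struct extent_op *op)
{
        if (!op)
                return;
        if (!existing->extent_op) {
                existing->extent_op = op;       /* adopt the new op outright */
                return;
        }
        if (op->update_key) {
                memcpy(existing->extent_op->key, op->key, sizeof(op->key));
                existing->extent_op->update_key = 1;
        }
        if (op->update_flags) {
                existing->extent_op->flags_to_set |= op->flags_to_set;
                existing->extent_op->update_flags = 1;
        }
        free(op);       /* btrfs_free_delayed_extent_op() in the fragments */
}
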
536 struct btrfs_delayed_ref_node *ref, in add_delayed_ref_head() argument
579 atomic_set(&ref->refs, 1); in add_delayed_ref_head()
580 ref->bytenr = bytenr; in add_delayed_ref_head()
581 ref->num_bytes = num_bytes; in add_delayed_ref_head()
582 ref->ref_mod = count_mod; in add_delayed_ref_head()
583 ref->type = 0; in add_delayed_ref_head()
584 ref->action = 0; in add_delayed_ref_head()
585 ref->is_head = 1; in add_delayed_ref_head()
586 ref->in_tree = 1; in add_delayed_ref_head()
587 ref->seq = 0; in add_delayed_ref_head()
589 head_ref = btrfs_delayed_node_to_head(ref); in add_delayed_ref_head()
618 trace_add_delayed_ref_head(ref, head_ref, action); in add_delayed_ref_head()
625 update_existing_head_ref(delayed_refs, &existing->node, ref); in add_delayed_ref_head()
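
add_delayed_ref_head() initialises the generic node fields for a head: one reference, the extent's bytenr and size, the net ref_mod, no type or action of its own, and the head/in_tree/seq markers; if a head for this extent already exists, the new state is folded into it with update_existing_head_ref() instead of inserting a second head. The initialisation, restated as a small sketch over a hypothetical flattened struct:

/* Hypothetical flattened view of the generic delayed-ref node. */
struct dref_node {
        unsigned long long bytenr;
        unsigned long long num_bytes;
        unsigned long long seq;
        int refs;
        int ref_mod;
        int type;
        int action;
        int is_head;
        int in_tree;
};

static void init_head(struct dref_node *ref, unsigned long long bytenr,
                      unsigned long long num_bytes, int count_mod)
{
        ref->refs = 1;          /* atomic_set(&ref->refs, 1) in the fragment */
        ref->bytenr = bytenr;
        ref->num_bytes = num_bytes;
        ref->ref_mod = count_mod;
        ref->type = 0;          /* a head has no back-reference type */
        ref->action = 0;        /* ...and no action of its own */
        ref->is_head = 1;
        ref->in_tree = 1;
        ref->seq = 0;
}
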
650 struct btrfs_delayed_ref_node *ref, u64 bytenr, in add_delayed_tree_ref() argument
667 atomic_set(&ref->refs, 1); in add_delayed_tree_ref()
668 ref->bytenr = bytenr; in add_delayed_tree_ref()
669 ref->num_bytes = num_bytes; in add_delayed_tree_ref()
670 ref->ref_mod = 1; in add_delayed_tree_ref()
671 ref->action = action; in add_delayed_tree_ref()
672 ref->is_head = 0; in add_delayed_tree_ref()
673 ref->in_tree = 1; in add_delayed_tree_ref()
674 ref->seq = seq; in add_delayed_tree_ref()
676 full_ref = btrfs_delayed_node_to_tree_ref(ref); in add_delayed_tree_ref()
680 ref->type = BTRFS_SHARED_BLOCK_REF_KEY; in add_delayed_tree_ref()
682 ref->type = BTRFS_TREE_BLOCK_REF_KEY; in add_delayed_tree_ref()
685 trace_add_delayed_tree_ref(ref, full_ref, action); in add_delayed_tree_ref()
687 ret = add_delayed_ref_tail_merge(trans, delayed_refs, head_ref, ref); in add_delayed_tree_ref()
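
add_delayed_tree_ref() fills in a concrete (non-head) tree reference: ref_mod is always 1, is_head is 0, and the type depends on whether a parent block was supplied: a non-zero parent means a shared back-reference (BTRFS_SHARED_BLOCK_REF_KEY), otherwise a keyed reference owned by one root (BTRFS_TREE_BLOCK_REF_KEY). The finished node is then handed to add_delayed_ref_tail_merge(). The data-ref group further down follows the same shape. A sketch with hypothetical names:

enum tree_ref_type { TREE_BLOCK_REF, SHARED_BLOCK_REF };  /* hypothetical */

struct tree_node {
        int ref_mod;
        int action;
        int is_head;
        int in_tree;
        unsigned long long seq;
        enum tree_ref_type type;
};

static void init_tree_ref(struct tree_node *ref, int action,
                          unsigned long long parent, unsigned long long seq)
{
        ref->ref_mod = 1;       /* a concrete ref always adds or drops exactly one */
        ref->action = action;
        ref->is_head = 0;
        ref->in_tree = 1;
        ref->seq = seq;
        /* non-zero parent: reference resolved through the parent block (shared);
         * zero: keyed reference owned by a specific root */
        ref->type = parent ? SHARED_BLOCK_REF : TREE_BLOCK_REF;
}
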
704 struct btrfs_delayed_ref_node *ref, u64 bytenr, in add_delayed_data_ref() argument
722 atomic_set(&ref->refs, 1); in add_delayed_data_ref()
723 ref->bytenr = bytenr; in add_delayed_data_ref()
724 ref->num_bytes = num_bytes; in add_delayed_data_ref()
725 ref->ref_mod = 1; in add_delayed_data_ref()
726 ref->action = action; in add_delayed_data_ref()
727 ref->is_head = 0; in add_delayed_data_ref()
728 ref->in_tree = 1; in add_delayed_data_ref()
729 ref->seq = seq; in add_delayed_data_ref()
731 full_ref = btrfs_delayed_node_to_data_ref(ref); in add_delayed_data_ref()
735 ref->type = BTRFS_SHARED_DATA_REF_KEY; in add_delayed_data_ref()
737 ref->type = BTRFS_EXTENT_DATA_REF_KEY; in add_delayed_data_ref()
742 trace_add_delayed_data_ref(ref, full_ref, action); in add_delayed_data_ref()
744 ret = add_delayed_ref_tail_merge(trans, delayed_refs, head_ref, ref); in add_delayed_data_ref()
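
add_delayed_data_ref() mirrors the tree-ref path, with the split between BTRFS_SHARED_DATA_REF_KEY (a parent block was given) and BTRFS_EXTENT_DATA_REF_KEY (a keyed reference). What distinguishes one keyed data ref from another is not visible in these hits; the root/objectid/offset fields in the sketch below are an assumption about that identity, kept only to show why comp_data_refs() has something to compare:

enum data_ref_type { EXTENT_DATA_REF, SHARED_DATA_REF };  /* hypothetical */

struct data_ref {
        enum data_ref_type type;
        unsigned long long parent;      /* set for shared refs              */
        unsigned long long root;        /* owning tree    (assumed field)   */
        unsigned long long objectid;    /* owning inode   (assumed field)   */
        unsigned long long offset;      /* file offset    (assumed field)   */
};

static enum data_ref_type classify_data_ref(unsigned long long parent)
{
        return parent ? SHARED_DATA_REF : EXTENT_DATA_REF;
}
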
761 struct btrfs_delayed_tree_ref *ref; in btrfs_add_delayed_tree_ref() local
767 ref = kmem_cache_alloc(btrfs_delayed_tree_ref_cachep, GFP_NOFS); in btrfs_add_delayed_tree_ref()
768 if (!ref) in btrfs_add_delayed_tree_ref()
793 add_delayed_tree_ref(fs_info, trans, head_ref, &ref->node, bytenr, in btrfs_add_delayed_tree_ref()
802 kmem_cache_free(btrfs_delayed_tree_ref_cachep, ref); in btrfs_add_delayed_tree_ref()
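
btrfs_add_delayed_tree_ref() allocates the full tree-ref object from a slab cache (GFP_NOFS), passes its embedded generic node (&ref->node) to the helpers above, and frees it back to the cache on the error path. The btrfs_delayed_node_to_tree_ref() calls earlier in the listing go the other way, recovering the containing object from the embedded node. A user-space sketch of that embed/recover idiom, with hypothetical types:

#include <stddef.h>

struct dref_node {
        unsigned long long bytenr;
};

struct tree_ref {
        struct dref_node node;          /* generic part: what '&ref->node' passes on */
        unsigned long long root;        /* type-specific part (assumed field)        */
};

/* Recover the containing tree_ref from a pointer to its embedded node,
 * the container_of() trick behind btrfs_delayed_node_to_tree_ref(). */
static struct tree_ref *node_to_tree_ref(struct dref_node *node)
{
        return (struct tree_ref *)((char *)node - offsetof(struct tree_ref, node));
}
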
817 struct btrfs_delayed_data_ref *ref; in btrfs_add_delayed_data_ref() local
823 ref = kmem_cache_alloc(btrfs_delayed_data_ref_cachep, GFP_NOFS); in btrfs_add_delayed_data_ref()
824 if (!ref) in btrfs_add_delayed_data_ref()
829 kmem_cache_free(btrfs_delayed_data_ref_cachep, ref); in btrfs_add_delayed_data_ref()
836 kmem_cache_free(btrfs_delayed_data_ref_cachep, ref); in btrfs_add_delayed_data_ref()
856 add_delayed_data_ref(fs_info, trans, head_ref, &ref->node, bytenr, in btrfs_add_delayed_data_ref()
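
The two kmem_cache_free() hits in btrfs_add_delayed_data_ref() are error-path cleanups: the data ref is allocated first, and if a later allocation fails it has to be released before returning. A user-space sketch of that unwind pattern with plain malloc/free and hypothetical types:

#include <stdlib.h>

struct data_ref { int ref_mod; };       /* hypothetical placeholders */
struct head_ref { int ref_mod; };

/* Allocate both objects or neither: a later failure releases whatever
 * was already allocated, as the kmem_cache_free() fragments above do. */
static int alloc_refs(struct data_ref **refp, struct head_ref **headp)
{
        struct data_ref *ref = malloc(sizeof(*ref));
        struct head_ref *head;

        if (!ref)
                return -1;                      /* -ENOMEM in the kernel */

        head = malloc(sizeof(*head));
        if (!head) {
                free(ref);                      /* unwind the first allocation */
                return -1;
        }

        *refp = ref;
        *headp = head;
        return 0;
}
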