Lines Matching refs:ref
199 struct __prelim_ref *ref; in __add_prelim_ref() local
204 ref = kmem_cache_alloc(btrfs_prelim_ref_cache, gfp_mask); in __add_prelim_ref()
205 if (!ref) in __add_prelim_ref()
208 ref->root_id = root_id; in __add_prelim_ref()
210 ref->key_for_search = *key; in __add_prelim_ref()
212 memset(&ref->key_for_search, 0, sizeof(ref->key_for_search)); in __add_prelim_ref()
214 ref->inode_list = NULL; in __add_prelim_ref()
215 ref->level = level; in __add_prelim_ref()
216 ref->count = count; in __add_prelim_ref()
217 ref->parent = parent; in __add_prelim_ref()
218 ref->wanted_disk_byte = wanted_disk_byte; in __add_prelim_ref()
219 list_add_tail(&ref->list, head); in __add_prelim_ref()
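
The fields assigned above outline the shape of the preliminary reference record that the rest of this listing operates on. Below is a minimal userspace model of that record and of the allocate-and-queue step, assuming plain C types for the fields and replacing the kernel's kmem_cache and list_head with malloc and a singly linked list; it is an illustration of the pattern, not the kernel's definition.

#include <stdlib.h>
#include <string.h>

struct key {
    unsigned long long objectid;
    unsigned char type;
    unsigned long long offset;
};

struct prelim_ref {
    unsigned long long root_id;
    struct key key_for_search;            /* zeroed when no key is supplied */
    void *inode_list;
    int level;
    int count;
    unsigned long long parent;            /* nonzero once the ref is direct */
    unsigned long long wanted_disk_byte;
    struct prelim_ref *next;              /* stands in for the kernel list_head */
};

/* Allocate, fill and append one preliminary ref, mirroring lines 199-219;
 * returns -ENOMEM (-12) when the allocation fails. */
static int add_prelim_ref(struct prelim_ref **head, unsigned long long root_id,
                          const struct key *key, int level,
                          unsigned long long parent,
                          unsigned long long wanted_disk_byte, int count)
{
    struct prelim_ref *ref = malloc(sizeof(*ref));

    if (!ref)
        return -12;
    ref->root_id = root_id;
    if (key)
        ref->key_for_search = *key;
    else
        memset(&ref->key_for_search, 0, sizeof(ref->key_for_search));
    ref->inode_list = NULL;
    ref->level = level;
    ref->count = count;
    ref->parent = parent;
    ref->wanted_disk_byte = wanted_disk_byte;
    ref->next = NULL;
    while (*head)                          /* tail append, like list_add_tail */
        head = &(*head)->next;
    *head = ref;
    return 0;
}
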
225 struct ulist *parents, struct __prelim_ref *ref, in add_all_parents() argument
233 struct btrfs_key *key_for_search = &ref->key_for_search; in add_all_parents()
237 u64 wanted_disk_byte = ref->wanted_disk_byte; in add_all_parents()
310 struct __prelim_ref *ref, in __resolve_indirect_ref() argument
319 int level = ref->level; in __resolve_indirect_ref()
322 root_key.objectid = ref->root_id; in __resolve_indirect_ref()
346 ret = btrfs_search_old_slot(root, &ref->key_for_search, path, time_seq); in __resolve_indirect_ref()
353 ref->root_id, level, ref->count, ret, in __resolve_indirect_ref()
354 ref->key_for_search.objectid, ref->key_for_search.type, in __resolve_indirect_ref()
355 ref->key_for_search.offset); in __resolve_indirect_ref()
369 ret = add_all_parents(root, path, parents, ref, level, time_seq, in __resolve_indirect_ref()
388 struct __prelim_ref *ref; in __resolve_indirect_refs() local
404 list_for_each_entry_safe(ref, ref_safe, head, list) { in __resolve_indirect_refs()
405 if (ref->parent) /* already direct */ in __resolve_indirect_refs()
407 if (ref->count == 0) in __resolve_indirect_refs()
409 if (root_objectid && ref->root_id != root_objectid) { in __resolve_indirect_refs()
413 err = __resolve_indirect_ref(fs_info, path, time_seq, ref, in __resolve_indirect_refs()
430 ref->parent = node ? node->val : 0; in __resolve_indirect_refs()
431 ref->inode_list = node ? in __resolve_indirect_refs()
442 memcpy(new_ref, ref, sizeof(*ref)); in __resolve_indirect_refs()
446 list_add(&new_ref->list, &ref->list); in __resolve_indirect_refs()
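
Lines 404-446 convert indirect references (known only by root and key) into direct ones: refs that already carry a parent or have a zero count are skipped, the first resolved parent is written into the ref in place, and each additional parent gets a cloned ref spliced in right behind it. Continuing the userspace model above, with resolve_parents() as a hypothetical stand-in for __resolve_indirect_ref() and the root_objectid filter omitted:

/* Hypothetical helper: fills *parents with a malloc'd array of parent byte
 * numbers for the ref's root, key and level; returns how many it found,
 * or a negative error. */
extern int resolve_parents(const struct prelim_ref *ref,
                           unsigned long long **parents);

/* Turn indirect refs into direct ones, mirroring lines 404-446. */
static int resolve_indirect_refs(struct prelim_ref *head)
{
    for (struct prelim_ref *ref = head; ref; ref = ref->next) {
        unsigned long long *parents = NULL;
        int n, i;

        if (ref->parent)          /* already direct */
            continue;
        if (ref->count == 0)
            continue;

        n = resolve_parents(ref, &parents);
        if (n < 0)
            return n;

        ref->parent = n ? parents[0] : 0;
        for (i = 1; i < n; i++) {
            struct prelim_ref *new_ref = malloc(sizeof(*new_ref));

            if (!new_ref) {
                free(parents);
                return -12;       /* -ENOMEM */
            }
            memcpy(new_ref, ref, sizeof(*ref));
            new_ref->parent = parents[i];
            new_ref->next = ref->next;    /* insert after the current ref */
            ref->next = new_ref;
            ref = new_ref;                /* skip the clones we just added */
        }
        free(parents);
    }
    return 0;
}
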
484 struct __prelim_ref *ref; in __add_missing_keys() local
485 ref = list_entry(pos, struct __prelim_ref, list); in __add_missing_keys()
487 if (ref->parent) in __add_missing_keys()
489 if (ref->key_for_search.type) in __add_missing_keys()
491 BUG_ON(!ref->wanted_disk_byte); in __add_missing_keys()
492 eb = read_tree_block(fs_info->tree_root, ref->wanted_disk_byte, in __add_missing_keys()
500 btrfs_item_key_to_cpu(eb, &ref->key_for_search, 0); in __add_missing_keys()
502 btrfs_node_key_to_cpu(eb, &ref->key_for_search, 0); in __add_missing_keys()
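
Lines 484-502 backfill the search key for indirect refs that were queued without one: the tree block named by wanted_disk_byte is read and its first key is taken, the item key for a leaf or the node key for an inner block. A compressed sketch under the same model, where read_block(), block_is_leaf(), first_item_key() and first_node_key() are hypothetical stand-ins for read_tree_block() and the item/node key accessors:

/* Hypothetical stand-ins for the extent-buffer helpers used above. */
struct tree_block;
extern struct tree_block *read_block(unsigned long long bytenr);
extern int block_is_leaf(const struct tree_block *eb);
extern void first_item_key(const struct tree_block *eb, struct key *key);
extern void first_node_key(const struct tree_block *eb, struct key *key);

/* Fill in the missing search keys, mirroring lines 484-502. */
static int add_missing_keys(struct prelim_ref *head)
{
    for (struct prelim_ref *ref = head; ref; ref = ref->next) {
        struct tree_block *eb;

        if (ref->parent)                 /* direct refs need no key */
            continue;
        if (ref->key_for_search.type)    /* key already known */
            continue;
        eb = read_block(ref->wanted_disk_byte);
        if (!eb)
            return -5;                   /* -EIO */
        if (block_is_leaf(eb))
            first_item_key(eb, &ref->key_for_search);
        else
            first_node_key(eb, &ref->key_for_search);
    }
    return 0;
}
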
612 struct btrfs_delayed_tree_ref *ref; in __add_delayed_refs() local
614 ref = btrfs_delayed_node_to_tree_ref(node); in __add_delayed_refs()
615 ret = __add_prelim_ref(prefs, ref->root, &op_key, in __add_delayed_refs()
616 ref->level + 1, 0, node->bytenr, in __add_delayed_refs()
621 struct btrfs_delayed_tree_ref *ref; in __add_delayed_refs() local
623 ref = btrfs_delayed_node_to_tree_ref(node); in __add_delayed_refs()
624 ret = __add_prelim_ref(prefs, ref->root, NULL, in __add_delayed_refs()
625 ref->level + 1, ref->parent, in __add_delayed_refs()
631 struct btrfs_delayed_data_ref *ref; in __add_delayed_refs() local
632 ref = btrfs_delayed_node_to_data_ref(node); in __add_delayed_refs()
634 key.objectid = ref->objectid; in __add_delayed_refs()
636 key.offset = ref->offset; in __add_delayed_refs()
642 if (inum && ref->objectid != inum) { in __add_delayed_refs()
647 ret = __add_prelim_ref(prefs, ref->root, &key, 0, 0, in __add_delayed_refs()
653 struct btrfs_delayed_data_ref *ref; in __add_delayed_refs() local
655 ref = btrfs_delayed_node_to_data_ref(node); in __add_delayed_refs()
657 key.objectid = ref->objectid; in __add_delayed_refs()
659 key.offset = ref->offset; in __add_delayed_refs()
660 ret = __add_prelim_ref(prefs, ref->root, &key, 0, in __add_delayed_refs()
661 ref->parent, node->bytenr, in __add_delayed_refs()
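
Lines 612-662 translate the four delayed-ref kinds into preliminary refs: tree-block refs are queued at level + 1, data refs at level 0 with an (objectid, EXTENT_DATA, offset) key, and the shared kinds additionally carry the parent block. A simplified mapping under the model above; the type names, the count argument (derived in the kernel from the delayed node's ref_mod and add/drop direction) and the omitted inum filter are assumptions of the sketch:

/* Simplified delayed-ref node; kind names and field layout are assumptions. */
enum delayed_kind { TREE_BLOCK_REF, SHARED_BLOCK_REF,
                    EXTENT_DATA_REF, SHARED_DATA_REF };

struct delayed_node {
    enum delayed_kind kind;
    unsigned long long bytenr;      /* extent the delayed ref points at */
    unsigned long long root;        /* owning root */
    unsigned long long parent;      /* parent block (shared kinds) */
    int level;                      /* tree level (tree-block kinds) */
    unsigned long long objectid;    /* data refs: inode number */
    unsigned long long offset;      /* data refs: file offset */
};

/* Queue one delayed ref as a prelim ref, mirroring the four cases at
 * lines 612-662. */
static int add_delayed_ref(struct prelim_ref **prefs,
                           const struct delayed_node *node,
                           const struct key *op_key, int count)
{
    struct key key = {
        .objectid = node->objectid,
        .type = 108,                /* BTRFS_EXTENT_DATA_KEY */
        .offset = node->offset,
    };

    switch (node->kind) {
    case TREE_BLOCK_REF:
        return add_prelim_ref(prefs, node->root, op_key, node->level + 1,
                              0, node->bytenr, count);
    case SHARED_BLOCK_REF:
        return add_prelim_ref(prefs, node->root, NULL, node->level + 1,
                              node->parent, node->bytenr, count);
    case EXTENT_DATA_REF:
        return add_prelim_ref(prefs, node->root, &key, 0,
                              0, node->bytenr, count);
    case SHARED_DATA_REF:
        return add_prelim_ref(prefs, node->root, &key, 0,
                              node->parent, node->bytenr, count);
    }
    return 0;
}
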
901 struct __prelim_ref *ref; local
1017 ref = list_first_entry(&prefs, struct __prelim_ref, list);
1018 WARN_ON(ref->count < 0);
1019 if (roots && ref->count && ref->root_id && ref->parent == 0) {
1020 if (root_objectid && ref->root_id != root_objectid) {
1026 ret = ulist_add(roots, ref->root_id, 0, GFP_NOFS);
1030 if (ref->count && ref->parent) {
1031 if (extent_item_pos && !ref->inode_list &&
1032 ref->level == 0) {
1036 ref->parent, 0);
1050 ref->inode_list = eie;
1052 ret = ulist_add_merge_ptr(refs, ref->parent,
1053 ref->inode_list,
1065 eie->next = ref->inode_list;
1069 list_del(&ref->list);
1070 kmem_cache_free(btrfs_prelim_ref_cache, ref);
1076 ref = list_first_entry(&prefs, struct __prelim_ref, list);
1077 list_del(&ref->list);
1078 kmem_cache_free(btrfs_prelim_ref_cache, ref);
1081 ref = list_first_entry(&prefs_delayed, struct __prelim_ref,
1083 list_del(&ref->list);
1084 kmem_cache_free(btrfs_prelim_ref_cache, ref);
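
The loops at lines 1069-1084 unlink and free whatever preliminary refs remain on the normal and delayed lists, either after they have been folded into the result ulists or when an error forces an early exit. The equivalent teardown for the malloc-based model used in the sketches above:

/* Release whatever is left on a prelim-ref list, mirroring the cleanup
 * loops at lines 1069-1084 (list_del + kmem_cache_free in the kernel). */
static void free_prelim_refs(struct prelim_ref **head)
{
    while (*head) {
        struct prelim_ref *ref = *head;

        *head = ref->next;
        free(ref);
    }
}
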