Lines Matching refs:node

92 struct backref_node *node[2]; member
197 struct backref_node *node);
199 struct backref_node *node);
220 struct backref_node *node; in backref_cache_cleanup() local
224 node = list_entry(cache->detached.next, in backref_cache_cleanup()
226 remove_backref_node(cache, node); in backref_cache_cleanup()
230 node = list_entry(cache->leaves.next, in backref_cache_cleanup()
232 remove_backref_node(cache, node); in backref_cache_cleanup()
248 struct backref_node *node; in alloc_backref_node() local
250 node = kzalloc(sizeof(*node), GFP_NOFS); in alloc_backref_node()
251 if (node) { in alloc_backref_node()
252 INIT_LIST_HEAD(&node->list); in alloc_backref_node()
253 INIT_LIST_HEAD(&node->upper); in alloc_backref_node()
254 INIT_LIST_HEAD(&node->lower); in alloc_backref_node()
255 RB_CLEAR_NODE(&node->rb_node); in alloc_backref_node()
258 return node; in alloc_backref_node()
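
For orientation, the alloc_backref_node() hits at lines 248-258 are the usual allocate-and-initialise pattern for a cache node. A minimal sketch, assuming a cut-down struct backref_node that carries only the fields visible elsewhere in this listing (the real structure has more members, e.g. root, cowonly, checked); the helper name is hypothetical:

#include <linux/types.h>
#include <linux/slab.h>
#include <linux/list.h>
#include <linux/rbtree.h>

struct extent_buffer;                   /* btrfs metadata buffer, defined in fs/btrfs/extent_io.h */

/* cut-down stand-in for the real struct backref_node */
struct backref_node {
        struct rb_node rb_node;         /* keyed by bytenr in the cache rb_root */
        u64 bytenr;                     /* block this node describes */
        u64 new_bytenr;                 /* where the block was relocated to */
        struct list_head list;          /* pending/changed/detached linkage */
        struct list_head upper;         /* edges to blocks referencing this one */
        struct list_head lower;         /* edges to blocks this one references */
        struct extent_buffer *eb;       /* cached copy of the block, if read */
        unsigned int level;             /* tree level of the block */
        unsigned int locked:1;          /* eb is tree-locked */
        unsigned int lowest:1;
        unsigned int pending:1;
        unsigned int processed:1;
        unsigned int detached:1;
};

static struct backref_node *alloc_backref_node_sketch(void)
{
        struct backref_node *node;

        node = kzalloc(sizeof(*node), GFP_NOFS);
        if (node) {
                INIT_LIST_HEAD(&node->list);
                INIT_LIST_HEAD(&node->upper);
                INIT_LIST_HEAD(&node->lower);
                RB_CLEAR_NODE(&node->rb_node);  /* not yet in the cache rb-tree */
        }
        return node;
}
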
262 struct backref_node *node) in free_backref_node() argument
264 if (node) { in free_backref_node()
266 kfree(node); in free_backref_node()
290 struct rb_node *node) in tree_insert() argument
308 rb_link_node(node, parent, p); in tree_insert()
309 rb_insert_color(node, root); in tree_insert()
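
Lines 290-309 only show the tail of tree_insert() (rb_link_node()/rb_insert_color()); the search loop in front of it is the standard rb-tree insert-by-key idiom. A hedged reconstruction keyed on bytenr, reusing the simplified backref_node above (helper name hypothetical):

/* insert 'node' keyed by 'bytenr'; return the clashing rb_node on a
 * duplicate key so the caller can treat it as -EEXIST */
static struct rb_node *tree_insert_sketch(struct rb_root *root, u64 bytenr,
                                          struct rb_node *node)
{
        struct rb_node **p = &root->rb_node;
        struct rb_node *parent = NULL;
        struct backref_node *entry;

        while (*p) {
                parent = *p;
                entry = rb_entry(parent, struct backref_node, rb_node);

                if (bytenr < entry->bytenr)
                        p = &(*p)->rb_left;
                else if (bytenr > entry->bytenr)
                        p = &(*p)->rb_right;
                else
                        return parent;
        }

        rb_link_node(node, parent, p);
        rb_insert_color(node, root);
        return NULL;
}
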
346 static struct backref_node *walk_up_backref(struct backref_node *node, in walk_up_backref() argument
353 while (!list_empty(&node->upper)) { in walk_up_backref()
354 edge = list_entry(node->upper.next, in walk_up_backref()
357 node = edge->node[UPPER]; in walk_up_backref()
359 BUG_ON(node->detached); in walk_up_backref()
361 return node; in walk_up_backref()
376 lower = edge->node[LOWER]; in walk_down_backref()
385 return edge->node[UPPER]; in walk_down_backref()
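
The node[2] member at line 92 and the walk_up_backref()/walk_down_backref() fragments (lines 346-385) show how a backref edge ties a LOWER and an UPPER node together and how the cache is walked along those edges. A simplified sketch of the edge type and the upward walk; the real walk_up_backref() also records each edge it crosses into an array supplied by the caller:

enum { LOWER = 0, UPPER = 1 };

struct backref_edge {
        /* list[LOWER] sits on node[LOWER]->upper,
         * list[UPPER] sits on node[UPPER]->lower */
        struct list_head list[2];
        struct backref_node *node[2];
};

/* follow the first upper edge from each node until a block with no
 * referencers (a tree root) is reached */
static struct backref_node *walk_up_backref_sketch(struct backref_node *node)
{
        struct backref_edge *edge;

        while (!list_empty(&node->upper)) {
                edge = list_entry(node->upper.next,
                                  struct backref_edge, list[LOWER]);
                node = edge->node[UPPER];
        }
        return node;
}
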
391 static void unlock_node_buffer(struct backref_node *node) in unlock_node_buffer() argument
393 if (node->locked) { in unlock_node_buffer()
394 btrfs_tree_unlock(node->eb); in unlock_node_buffer()
395 node->locked = 0; in unlock_node_buffer()
399 static void drop_node_buffer(struct backref_node *node) in drop_node_buffer() argument
401 if (node->eb) { in drop_node_buffer()
402 unlock_node_buffer(node); in drop_node_buffer()
403 free_extent_buffer(node->eb); in drop_node_buffer()
404 node->eb = NULL; in drop_node_buffer()
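
unlock_node_buffer() and drop_node_buffer() (lines 391-404) release the cached extent buffer attached to a node. A sketch that mirrors the lines above, assuming the btrfs helpers btrfs_tree_unlock() and free_extent_buffer() and the simplified struct from the first sketch:

static void unlock_node_buffer_sketch(struct backref_node *node)
{
        if (node->locked) {
                btrfs_tree_unlock(node->eb);
                node->locked = 0;
        }
}

static void drop_node_buffer_sketch(struct backref_node *node)
{
        if (node->eb) {
                unlock_node_buffer_sketch(node);
                free_extent_buffer(node->eb);   /* drop our reference */
                node->eb = NULL;
        }
}
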
409 struct backref_node *node) in drop_backref_node() argument
411 BUG_ON(!list_empty(&node->upper)); in drop_backref_node()
413 drop_node_buffer(node); in drop_backref_node()
414 list_del(&node->list); in drop_backref_node()
415 list_del(&node->lower); in drop_backref_node()
416 if (!RB_EMPTY_NODE(&node->rb_node)) in drop_backref_node()
417 rb_erase(&node->rb_node, &tree->rb_root); in drop_backref_node()
418 free_backref_node(tree, node); in drop_backref_node()
425 struct backref_node *node) in remove_backref_node() argument
430 if (!node) in remove_backref_node()
433 BUG_ON(!node->lowest && !node->detached); in remove_backref_node()
434 while (!list_empty(&node->upper)) { in remove_backref_node()
435 edge = list_entry(node->upper.next, struct backref_edge, in remove_backref_node()
437 upper = edge->node[UPPER]; in remove_backref_node()
443 BUG_ON(!list_empty(&node->upper)); in remove_backref_node()
444 drop_backref_node(cache, node); in remove_backref_node()
445 node = upper; in remove_backref_node()
446 node->lowest = 1; in remove_backref_node()
459 drop_backref_node(cache, node); in remove_backref_node()
463 struct backref_node *node, u64 bytenr) in update_backref_node() argument
466 rb_erase(&node->rb_node, &cache->rb_root); in update_backref_node()
467 node->bytenr = bytenr; in update_backref_node()
468 rb_node = tree_insert(&cache->rb_root, node->bytenr, &node->rb_node); in update_backref_node()
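
update_backref_node() at lines 463-468 re-keys a node after its block has been relocated: take it out of the cache rb-tree, switch bytenr to the new location, and re-insert. A sketch reusing the hypothetical tree_insert_sketch() helper; a duplicate key would indicate a corrupted cache, which elsewhere in this listing (lines 1039, 1362) is handled via backref_tree_panic():

static void update_backref_node_sketch(struct rb_root *root,
                                       struct backref_node *node, u64 bytenr)
{
        struct rb_node *rb_node;

        rb_erase(&node->rb_node, root);
        node->bytenr = bytenr;
        rb_node = tree_insert_sketch(root, node->bytenr, &node->rb_node);
        WARN_ON(rb_node);       /* stand-in for backref_tree_panic() */
}
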
479 struct backref_node *node; in update_backref_cache() local
496 node = list_entry(cache->detached.next, in update_backref_cache()
498 remove_backref_node(cache, node); in update_backref_cache()
502 node = list_entry(cache->changed.next, in update_backref_cache()
504 list_del_init(&node->list); in update_backref_cache()
505 BUG_ON(node->pending); in update_backref_cache()
506 update_backref_node(cache, node, node->new_bytenr); in update_backref_cache()
514 list_for_each_entry(node, &cache->pending[level], list) { in update_backref_cache()
515 BUG_ON(!node->pending); in update_backref_cache()
516 if (node->bytenr == node->new_bytenr) in update_backref_cache()
518 update_backref_node(cache, node, node->new_bytenr); in update_backref_cache()
556 struct mapping_node *node; in find_reloc_root() local
562 node = rb_entry(rb_node, struct mapping_node, rb_node); in find_reloc_root()
563 root = (struct btrfs_root *)node->data; in find_reloc_root()
691 struct backref_node *node = NULL; local
714 node = alloc_backref_node(cache);
715 if (!node) {
720 node->bytenr = bytenr;
721 node->level = level;
722 node->lowest = 1;
723 cur = node;
754 exist = edge->node[UPPER];
876 edge->node[LOWER] = cur;
877 edge->node[UPPER] = upper;
995 edge->node[LOWER] = lower;
996 edge->node[UPPER] = upper;
1025 cur = edge->node[UPPER];
1033 ASSERT(node->checked);
1034 cowonly = node->cowonly;
1036 rb_node = tree_insert(&cache->rb_root, node->bytenr,
1037 &node->rb_node);
1039 backref_tree_panic(rb_node, -EEXIST, node->bytenr);
1040 list_add_tail(&node->lower, &cache->leaves);
1043 list_for_each_entry(edge, &node->upper, list[LOWER])
1049 upper = edge->node[UPPER];
1052 lower = edge->node[LOWER];
1107 if (upper == node)
1108 node = NULL;
1118 lower = edge->node[LOWER];
1147 lower = edge->node[LOWER];
1148 upper = edge->node[UPPER];
1177 ASSERT(!node || !node->detached);
1178 return node;
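
Throughout build_backref_tree() (lines 691-1178) a freshly allocated edge is wired up by setting both ends of node[] and splicing the edge onto each endpoint's lists, e.g. lines 876-877, 995-996 and 1257. A minimal sketch of that linking step, using the simplified types from the earlier sketches (the helper name is hypothetical):

static void link_backref_edge_sketch(struct backref_edge *edge,
                                     struct backref_node *lower,
                                     struct backref_node *upper)
{
        edge->node[LOWER] = lower;
        edge->node[UPPER] = upper;
        /* the edge is reachable from both ends: via lower->upper ... */
        list_add_tail(&edge->list[LOWER], &lower->upper);
        /* ... and via upper->lower */
        list_add_tail(&edge->list[UPPER], &upper->lower);
}
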
1193 struct backref_node *node = NULL; local
1204 node = rb_entry(rb_node, struct backref_node, rb_node);
1205 if (node->detached)
1206 node = NULL;
1208 BUG_ON(node->new_bytenr != reloc_root->node->start);
1211 if (!node) {
1215 node = rb_entry(rb_node, struct backref_node,
1217 BUG_ON(node->detached);
1221 if (!node)
1228 new_node->bytenr = dest->node->start;
1229 new_node->level = node->level;
1230 new_node->lowest = node->lowest;
1234 if (!node->lowest) {
1235 list_for_each_entry(edge, &node->lower, list[UPPER]) {
1240 new_edge->node[UPPER] = new_node;
1241 new_edge->node[LOWER] = edge->node[LOWER];
1257 &new_edge->node[LOWER]->upper);
1278 struct mapping_node *node; local
1281 node = kmalloc(sizeof(*node), GFP_NOFS);
1282 if (!node)
1285 node->bytenr = root->node->start;
1286 node->data = root;
1290 node->bytenr, &node->rb_node);
1295 "tree", node->bytenr);
1296 kfree(node);
1311 struct mapping_node *node = NULL; local
1316 root->node->start);
1318 node = rb_entry(rb_node, struct mapping_node, rb_node);
1319 rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root);
1323 if (!node)
1325 BUG_ON((struct btrfs_root *)node->data != root);
1330 kfree(node);
1340 struct mapping_node *node = NULL; local
1345 root->node->start);
1347 node = rb_entry(rb_node, struct mapping_node, rb_node);
1348 rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root);
1352 if (!node)
1354 BUG_ON((struct btrfs_root *)node->data != root);
1357 node->bytenr = new_bytenr;
1359 node->bytenr, &node->rb_node);
1362 backref_tree_panic(rb_node, -EEXIST, node->bytenr);
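
Lines 1278-1362 manage a second bytenr-keyed rb-tree that maps a reloc root's node->start to the root itself (struct mapping_node). A sketch of the lookup side, with a cut-down mapping_node and a hypothetical helper name; only the fields visible above (bytenr, data) are assumed:

struct mapping_node {
        struct rb_node rb_node;         /* keyed by the reloc root's node->start */
        u64 bytenr;
        void *data;                     /* the struct btrfs_root this maps to */
};

/* look up the mapping for 'bytenr' in the reloc root tree */
static struct mapping_node *mapping_search_sketch(struct rb_root *root,
                                                  u64 bytenr)
{
        struct rb_node *rb_node = root->rb_node;
        struct mapping_node *entry;

        while (rb_node) {
                entry = rb_entry(rb_node, struct mapping_node, rb_node);
                if (bytenr < entry->bytenr)
                        rb_node = rb_node->rb_left;
                else if (bytenr > entry->bytenr)
                        rb_node = rb_node->rb_right;
                else
                        return entry;
        }
        return NULL;
}

Re-keying after the root block moves (lines 1340-1362) then follows the same shape as update_backref_node_sketch() above: rb_erase(), store the new bytenr, re-insert, and panic on a clash.
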
1400 ret = btrfs_copy_root(trans, root, root->node, &eb,
1497 if (reloc_root->commit_root != reloc_root->node) {
1498 btrfs_set_root_node(root_item, reloc_root->node);
1517 struct rb_node *node; local
1524 node = root->inode_tree.rb_node;
1526 while (node) {
1527 prev = node;
1528 entry = rb_entry(node, struct btrfs_inode, rb_node);
1531 node = node->rb_left;
1533 node = node->rb_right;
1537 if (!node) {
1541 node = prev;
1547 while (node) {
1548 entry = rb_entry(node, struct btrfs_inode, rb_node);
1559 node = rb_next(node);
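
The find_next_inode() fragment at lines 1517-1559 is the classic "lower bound, then walk forward with rb_next()" pattern over the per-root inode_tree. A generic sketch of the same idiom, expressed over the simplified backref_node cache so the inode-specific parts (struct btrfs_inode, btrfs_ino(), igrab()) can be left out:

/* return the first cached node with bytenr >= 'bytenr', or NULL */
static struct backref_node *lower_bound_sketch(struct rb_root *root, u64 bytenr)
{
        struct rb_node *node = root->rb_node;
        struct rb_node *prev = NULL;
        struct backref_node *entry;

        while (node) {
                prev = node;
                entry = rb_entry(node, struct backref_node, rb_node);
                if (bytenr < entry->bytenr)
                        node = node->rb_left;
                else if (bytenr > entry->bytenr)
                        node = node->rb_right;
                else
                        return entry;           /* exact match */
        }
        /* no exact match: the last visited node is either the predecessor
         * or the successor of the key, so walk forward from it */
        node = prev;
        while (node) {
                entry = rb_entry(node, struct backref_node, rb_node);
                if (entry->bytenr >= bytenr)
                        return entry;
                node = rb_next(node);
        }
        return NULL;
}
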
2138 extent_buffer_get(reloc_root->node);
2139 path->nodes[level] = reloc_root->node;
2461 struct backref_node *node, argument
2468 next = node;
2484 if (next->new_bytenr != root->node->start) {
2487 next->new_bytenr = root->node->start;
2498 if (!next || next->level <= node->level)
2504 next = node;
2510 next = edges[index]->node[UPPER];
2523 struct backref_node *node) argument
2531 next = node;
2545 if (next != node)
2549 if (!next || next->level <= node->level)
2560 struct backref_node *node, int reserve) argument
2562 struct backref_node *next = node;
2568 BUG_ON(reserve && node->processed);
2573 if (next->processed && (reserve || next != node))
2584 next = edge->node[UPPER];
2593 struct backref_node *node) argument
2600 num_bytes = calcu_metadata_size(rc, node, 1) * 2;
2637 struct backref_node *node, argument
2653 BUG_ON(lowest && node->eb);
2655 path->lowest_level = node->level + 1;
2656 rc->backref_cache.path[node->level] = node;
2657 list_for_each_entry(edge, &node->upper, list[LOWER]) {
2660 upper = edge->node[UPPER];
2670 if (node->eb->start == bytenr)
2704 BUG_ON(bytenr != node->bytenr);
2706 if (node->eb->start == bytenr)
2721 if (!node->eb) {
2730 BUG_ON(node->eb != eb);
2733 node->eb->start);
2739 node->eb->start, blocksize,
2742 node->level, 0, 1);
2757 if (!err && node->pending) {
2758 drop_node_buffer(node);
2759 list_move_tail(&node->list, &rc->backref_cache.changed);
2760 node->pending = 0;
2770 struct backref_node *node, argument
2775 btrfs_node_key_to_cpu(node->eb, &key, 0);
2776 return do_relocation(trans, rc, node, &key, path, 0);
2785 struct backref_node *node; local
2791 node = list_entry(cache->pending[level].next,
2793 list_move_tail(&node->list, &list);
2794 BUG_ON(!node->pending);
2797 ret = link_to_upper(trans, rc, node, path);
2815 struct backref_node *node) argument
2818 if (node->level == 0 ||
2819 in_block_group(node->bytenr, rc->block_group)) {
2821 mark_block_processed(rc, node->bytenr, blocksize);
2823 node->processed = 1;
2831 struct backref_node *node) argument
2833 struct backref_node *next = node;
2852 next = edge->node[UPPER];
2895 struct backref_node *node, argument
2902 if (!node)
2905 BUG_ON(node->processed);
2906 root = select_one_root(trans, node);
2908 update_processed_blocks(rc, node);
2913 ret = reserve_metadata_space(trans, rc, node);
2920 BUG_ON(node->new_bytenr);
2921 BUG_ON(!list_empty(&node->list));
2924 node->new_bytenr = root->node->start;
2925 node->root = root;
2926 list_add_tail(&node->list, &rc->backref_cache.changed);
2928 path->lowest_level = node->level;
2935 update_processed_blocks(rc, node);
2937 ret = do_relocation(trans, rc, node, key, path, 1);
2940 if (ret || node->level == 0 || node->cowonly)
2941 remove_backref_node(&rc->backref_cache, node);
2952 struct backref_node *node; local
2988 node = build_backref_tree(rc, &block->key,
2990 if (IS_ERR(node)) {
2991 err = PTR_ERR(node);
2995 ret = relocate_tree_block(trans, rc, node, &block->key,
4531 struct backref_node *node; local
4544 if (buf == root->node)
4557 node = rc->backref_cache.path[level];
4558 BUG_ON(node->bytenr != buf->start &&
4559 node->new_bytenr != buf->start);
4561 drop_node_buffer(node);
4563 node->eb = cow;
4564 node->new_bytenr = cow->start;
4566 if (!node->pending) {
4567 list_move_tail(&node->list,
4569 node->pending = 1;
4573 __mark_block_processed(rc, node);
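
Finally, the btrfs_reloc_cow_block() hits (lines 4531-4573) show what happens when a block tracked in the cache is COWed: the old buffer is dropped and the node is repointed at the new copy, remembering its new location, before the node is queued on the pending/changed lists for update_backref_cache() to re-key later. A sketch reusing drop_node_buffer_sketch(); it assumes the full struct extent_buffer (with its ->start field) from fs/btrfs/extent_io.h is in scope and that an extra reference is taken on the new buffer via extent_buffer_get(), as seen at line 2138:

static void backref_node_cowed_sketch(struct backref_node *node,
                                      struct extent_buffer *cow)
{
        drop_node_buffer_sketch(node);  /* unlock and release the old ->eb */
        extent_buffer_get(cow);         /* keep the new buffer pinned */
        node->eb = cow;
        node->new_bytenr = cow->start;  /* the block now lives here */
}
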