Lines matching refs: j  (identifier cross-reference: each match below is prefixed with its source line number and tagged with its enclosing function; the code appears to be bcache's bset auxiliary search tree)

367 static unsigned inorder_next(unsigned j, unsigned size)  in inorder_next()  argument
369 if (j * 2 + 1 < size) { in inorder_next()
370 j = j * 2 + 1; in inorder_next()
372 while (j * 2 < size) in inorder_next()
373 j *= 2; in inorder_next()
375 j >>= ffz(j) + 1; in inorder_next()
377 return j; in inorder_next()
380 static unsigned inorder_prev(unsigned j, unsigned size) in inorder_prev() argument
382 if (j * 2 < size) { in inorder_prev()
383 j = j * 2; in inorder_prev()
385 while (j * 2 + 1 < size) in inorder_prev()
386 j = j * 2 + 1; in inorder_prev()
388 j >>= ffs(j); in inorder_prev()
390 return j; in inorder_prev()
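
The two helpers above walk an implicit binary tree stored in heap order: node j's children live at indices 2*j and 2*j + 1, a left-child edge appends a 0 bit to the index and a right-child edge appends a 1, so in-order neighbours can be found with pure index arithmetic. A minimal user-space sketch of the same logic, with the kernel's ffz()/ffs() bit helpers replaced by explicit loops; the tree size of 10 and the demo in main() are mine:

#include <assert.h>
#include <stdio.h>

/* In-order successor of node j in a heap-ordered tree of 'size' slots
 * (valid nodes are 1..size-1); returns 0 past the last node. */
static unsigned inorder_next(unsigned j, unsigned size)
{
        if (j * 2 + 1 < size) {
                j = j * 2 + 1;                  /* step to the right child... */
                while (j * 2 < size)
                        j *= 2;                 /* ...then to its leftmost descendant */
        } else {
                /* No right child: climb while we are a right child (odd index),
                 * then climb once more; equivalent to j >>= ffz(j) + 1. */
                while (j & 1)
                        j >>= 1;
                j >>= 1;
        }
        return j;
}

/* In-order predecessor; the mirror image, equivalent to j >>= ffs(j). */
static unsigned inorder_prev(unsigned j, unsigned size)
{
        if (j * 2 < size) {
                j = j * 2;                      /* left child... */
                while (j * 2 + 1 < size)
                        j = j * 2 + 1;          /* ...then its rightmost descendant */
        } else {
                while (!(j & 1))
                        j >>= 1;                /* climb while we are a left child */
                j >>= 1;
        }
        return j;
}

int main(void)
{
        unsigned size = 10;                     /* 9 nodes, indices 1..9 */
        unsigned j = inorder_next(0, size), n;

        /* For size = 10 this prints: 8 4 9 2 5 1 6 3 7 */
        while (j) {
                printf("%u ", j);
                n = inorder_next(j, size);
                if (n)
                        assert(inorder_prev(n, size) == j);
                j = n;
        }
        printf("\n");
        return 0;
}

The iteration pattern for (j = inorder_next(0, size); j; j = inorder_next(j, size)) is the same one bch_bset_build_written_tree() uses further down to visit every node in key order.
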
406 static unsigned __to_inorder(unsigned j, unsigned size, unsigned extra) in __to_inorder() argument
408 unsigned b = fls(j); in __to_inorder()
411 j ^= 1U << (b - 1); in __to_inorder()
412 j <<= 1; in __to_inorder()
413 j |= 1; in __to_inorder()
414 j <<= shift; in __to_inorder()
416 if (j > extra) in __to_inorder()
417 j -= (j - extra) >> 1; in __to_inorder()
419 return j; in __to_inorder()
422 static unsigned to_inorder(unsigned j, struct bset_tree *t) in to_inorder() argument
424 return __to_inorder(j, t->size, t->extra); in to_inorder()
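
to_inorder() maps a node's heap index to its in-order rank, which is what tree_to_bkey() below uses as a cacheline number. A worked run with illustrative values, size = 10 and extra = 4 (extra being twice the number of nodes on the partially filled bottom level, as set up when the written tree is built), for j = 7 (binary 111):

    b = fls(7) = 3, shift = fls(size - 1) - b = fls(9) - 3 = 1
    j ^= 1 << (b - 1)   ->  3   (drop the leading bit: j's offset within its level)
    j = (j << 1) | 1    ->  7   (odd value centred between the node's would-be children)
    j <<= shift         -> 14   (in-order position if the bottom level were completely full)
    14 > extra, so j -= (14 - 4) >> 1   ->  9

Node 7 is the rightmost of the 9 nodes and 9 is indeed the last in-order rank; the final step halves the spacing of positions beyond extra, where the bottom level has no real nodes.
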
427 static unsigned __inorder_to_tree(unsigned j, unsigned size, unsigned extra) in __inorder_to_tree() argument
431 if (j > extra) in __inorder_to_tree()
432 j += j - extra; in __inorder_to_tree()
434 shift = ffs(j); in __inorder_to_tree()
436 j >>= shift; in __inorder_to_tree()
437 j |= roundup_pow_of_two(size) >> shift; in __inorder_to_tree()
439 return j; in __inorder_to_tree()
442 static unsigned inorder_to_tree(unsigned j, struct bset_tree *t) in inorder_to_tree() argument
444 return __inorder_to_tree(j, t->size, t->extra); in inorder_to_tree()
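
__inorder_to_tree() is the inverse mapping. A self-contained user-space sketch of the pair, with GCC/Clang builtins standing in for the kernel's fls()/ffs()/roundup_pow_of_two(), plus a brute-force round-trip check in the spirit of the in-kernel self-test just below; the 1024 bound and the helper names are mine:

#include <assert.h>
#include <stdio.h>

/* fls(x): 1-based index of the highest set bit; x must be non-zero. */
static unsigned fls_(unsigned x)
{
        return 32 - __builtin_clz(x);
}

/* Heap index -> in-order rank for a tree of 'size' slots, where 'extra'
 * accounts for the partially filled bottom level. */
static unsigned to_inorder(unsigned j, unsigned size, unsigned extra)
{
        unsigned b = fls_(j);
        unsigned shift = fls_(size - 1) - b;

        j ^= 1u << (b - 1);
        j <<= 1;
        j |= 1;
        j <<= shift;

        if (j > extra)
                j -= (j - extra) >> 1;
        return j;
}

/* In-order rank -> heap index; inverse of to_inorder(). */
static unsigned inorder_to_tree(unsigned j, unsigned size, unsigned extra)
{
        unsigned shift;

        if (j > extra)
                j += j - extra;

        shift = __builtin_ffs(j);                       /* ffs(j) */
        j >>= shift;
        j |= (1u << fls_(size - 1)) >> shift;           /* roundup_pow_of_two(size) >> shift */
        return j;
}

int main(void)
{
        for (unsigned size = 2; size <= 1024; size++) {
                /* extra = (size - rounddown_pow_of_two(size - 1)) << 1 */
                unsigned extra = (size - (1u << (fls_(size - 1) - 1))) << 1;

                for (unsigned j = 1; j < size; j++)
                        assert(inorder_to_tree(to_inorder(j, size, extra),
                                               size, extra) == j);
        }
        printf("round trip ok\n");
        return 0;
}
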
457 unsigned i = 1, j = rounddown_pow_of_two(size - 1);
464 if (__inorder_to_tree(i, size, extra) != j)
465 panic("size %10u j %10u i %10u", size, j, i);
467 if (__to_inorder(j, size, extra) != i)
468 panic("size %10u j %10u i %10u", size, j, i);
470 if (j == rounddown_pow_of_two(size) - 1)
473 BUG_ON(inorder_prev(inorder_next(j, size), size) != j);
475 j = inorder_next(j, size);
521 static struct bkey *tree_to_bkey(struct bset_tree *t, unsigned j) in tree_to_bkey() argument
523 return cacheline_to_bkey(t, to_inorder(j, t), t->tree[j].m); in tree_to_bkey()
526 static struct bkey *tree_to_prev_bkey(struct bset_tree *t, unsigned j) in tree_to_prev_bkey() argument
528 return (void *) (((uint64_t *) tree_to_bkey(t, j)) - t->prev[j]); in tree_to_prev_bkey()
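
tree_to_prev_bkey() recovers the key immediately preceding node j's key by stepping back t->prev[j] 64-bit words, which works because bkeys are variable-length but always a whole number of u64s. A tiny sketch of that backward-offset trick using a made-up key struct (the real struct bkey is laid out differently):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical variable-width key, sized in 64-bit words like a bkey. */
struct key {
        uint64_t u64s;          /* total length of this key, in u64 words */
        uint64_t data[];
};

int main(void)
{
        uint64_t buf[8] = { 0 };

        struct key *a = (struct key *) buf;             /* occupies buf[0..2] */
        struct key *b = (struct key *) (buf + 3);       /* occupies buf[3..4] */
        a->u64s = 3;
        b->u64s = 2;

        /* Like t->prev[j]: how many u64 words before b its predecessor starts. */
        uint8_t prev = (uint8_t) a->u64s;

        struct key *p = (struct key *) (((uint64_t *) b) - prev);
        printf("predecessor is %llu u64s long\n",
               (unsigned long long) p->u64s);           /* prints 3 */
        return 0;
}
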
554 static void make_bfloat(struct bset_tree *t, unsigned j) in make_bfloat() argument
556 struct bkey_float *f = &t->tree[j]; in make_bfloat()
557 struct bkey *m = tree_to_bkey(t, j); in make_bfloat()
558 struct bkey *p = tree_to_prev_bkey(t, j); in make_bfloat()
560 struct bkey *l = is_power_of_2(j) in make_bfloat()
562 : tree_to_prev_bkey(t, j >> ffs(j)); in make_bfloat()
564 struct bkey *r = is_power_of_2(j + 1) in make_bfloat()
566 : tree_to_bkey(t, j >> (ffz(j) + 1)); in make_bfloat()
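
The two conditionals in make_bfloat() pick the keys that bracket node j's subtree, again by bit tricks on the heap index: j >> ffs(j) strips j's trailing zero bits (left-child edges) plus one set bit, landing on the closest ancestor whose right subtree contains j, while j >> (ffz(j) + 1) strips the trailing one bits (right-child edges) plus one clear bit, landing on the closest ancestor whose left subtree contains j. Nodes on the leftmost or rightmost spine (j or j + 1 a power of two) have no such ancestor, and the branches not captured by this listing appear to fall back to the first and last keys of the set. Worked example for j = 12 (binary 1100), whose path from the root is 1 -> 3 -> 6 -> 12 (right, left, left): ffs(12) = 3, so 12 >> 3 = 1, the ancestor we descended right from; ffz(12) = 0, so 12 >> 1 = 6, the ancestor we descended left from.
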
592 unsigned j = roundup(t[-1].size, in bset_alloc_tree() local
595 t->tree = t[-1].tree + j; in bset_alloc_tree()
596 t->prev = t[-1].prev + j; in bset_alloc_tree()
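
Each set's t->tree and t->prev arrays are carved out of the same allocation as the previous set's, starting where the previous tree ended, rounded up. The rounding granularity sits on a continuation line this listing does not capture; in the kernel source it is 64 / sizeof(struct bkey_float), i.e. 16 entries for the 4-byte packed bkey_float, presumably so that every per-set t->tree array begins on a 64-byte cacheline boundary within the shared buffer (a previous size of 41 entries, say, would place the next arrays 48 entries in).
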
638 unsigned j, cacheline = 1; in bch_bset_build_written_tree() local
656 for (j = inorder_next(0, t->size); in bch_bset_build_written_tree()
657 j; in bch_bset_build_written_tree()
658 j = inorder_next(j, t->size)) { in bch_bset_build_written_tree()
662 t->prev[j] = bkey_u64s(prev); in bch_bset_build_written_tree()
663 t->tree[j].m = bkey_to_cacheline_offset(t, cacheline++, k); in bch_bset_build_written_tree()
672 for (j = inorder_next(0, t->size); in bch_bset_build_written_tree()
673 j; in bch_bset_build_written_tree()
674 j = inorder_next(j, t->size)) in bch_bset_build_written_tree()
675 make_bfloat(t, j); in bch_bset_build_written_tree()
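
bch_bset_build_written_tree() fills the tree in two in-order passes over the same node sequence. The first pass steps through the sorted keys one cacheline at a time and records, for each node j, where its key lives: the offset within the cacheline (t->tree[j].m) and the length of the key before it (t->prev[j]). The second pass computes the compressed comparison value for every node with make_bfloat(). Because the cacheline counter advances once per in-order step, node j ends up describing the first key at or after the boundary of cacheline to_inorder(j, t), which is exactly how tree_to_bkey() above reads it back; with the size = 10 example used earlier, node 8 (in-order rank 1) would describe cacheline 1 and the root (rank 6) cacheline 6.
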
684 unsigned inorder, j = 1; in bch_bset_fix_invalidated_key() local
705 j = inorder_to_tree(inorder, t); in bch_bset_fix_invalidated_key()
707 if (j && in bch_bset_fix_invalidated_key()
708 j < t->size && in bch_bset_fix_invalidated_key()
709 k == tree_to_bkey(t, j)) in bch_bset_fix_invalidated_key()
711 make_bfloat(t, j); in bch_bset_fix_invalidated_key()
712 j = j * 2; in bch_bset_fix_invalidated_key()
713 } while (j < t->size); in bch_bset_fix_invalidated_key()
715 j = inorder_to_tree(inorder + 1, t); in bch_bset_fix_invalidated_key()
717 if (j && in bch_bset_fix_invalidated_key()
718 j < t->size && in bch_bset_fix_invalidated_key()
719 k == tree_to_prev_bkey(t, j)) in bch_bset_fix_invalidated_key()
721 make_bfloat(t, j); in bch_bset_fix_invalidated_key()
722 j = j * 2 + 1; in bch_bset_fix_invalidated_key()
723 } while (j < t->size); in bch_bset_fix_invalidated_key()
732 unsigned j = bkey_to_cacheline(t, k); in bch_bset_fix_lookup_table() local
742 while (j < t->size && in bch_bset_fix_lookup_table()
743 table_to_bkey(t, j) <= k) in bch_bset_fix_lookup_table()
744 j++; in bch_bset_fix_lookup_table()
749 for (; j < t->size; j++) { in bch_bset_fix_lookup_table()
750 t->prev[j] += shift; in bch_bset_fix_lookup_table()
752 if (t->prev[j] > 7) { in bch_bset_fix_lookup_table()
753 k = table_to_bkey(t, j - 1); in bch_bset_fix_lookup_table()
755 while (k < cacheline_to_bkey(t, j, 0)) in bch_bset_fix_lookup_table()
758 t->prev[j] = bkey_to_cacheline_offset(t, j, k); in bch_bset_fix_lookup_table()
896 unsigned inorder, j, n = 1; in bset_search_tree() local
904 j = n; in bset_search_tree()
905 f = &t->tree[j]; in bset_search_tree()
916 n = j * 2 + (((unsigned) in bset_search_tree()
920 n = (bkey_cmp(tree_to_bkey(t, j), search) > 0) in bset_search_tree()
921 ? j * 2 in bset_search_tree()
922 : j * 2 + 1; in bset_search_tree()
925 inorder = to_inorder(j, t); in bset_search_tree()
935 f = &t->tree[inorder_next(j, t->size)]; in bset_search_tree()
943 f = &t->tree[inorder_prev(j, t->size)]; in bset_search_tree()
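
bset_search_tree() is a lower-bound search over this heap-ordered ("Eytzinger"-style) layout: at each node it compares against the compressed bkey_float where possible (exponent == 127, also counted in the stats code below, appears to mark floats that could not be built and forces the full bkey_cmp() path), and afterwards converts the last node visited back to a cacheline with to_inorder() and its neighbours via inorder_next()/inorder_prev(). A minimal self-contained sketch of just the descent, on plain ints, tracking the best candidate instead of doing the cacheline bookkeeping; all names here are mine:

#include <stdio.h>

#define SIZE 8                          /* 1-based slots; nodes 1..7 hold keys */

static unsigned pos;

/* Fill a heap-ordered array from sorted input by in-order traversal:
 * node j's children are 2*j and 2*j + 1, so the result is a valid BST. */
static void build(const int *sorted, int *tree, unsigned j)
{
        if (j >= SIZE)
                return;
        build(sorted, tree, j * 2);
        tree[j] = sorted[pos++];
        build(sorted, tree, j * 2 + 1);
}

/* Smallest element >= x, or -1 if there is none: go left while the current
 * node is still a candidate, right otherwise. */
static int lower_bound(const int *tree, int x)
{
        int best = -1;
        unsigned n = 1;

        while (n < SIZE) {
                if (tree[n] >= x) {
                        best = tree[n];
                        n = n * 2;
                } else {
                        n = n * 2 + 1;
                }
        }
        return best;
}

int main(void)
{
        int sorted[] = { 1, 3, 5, 7, 9, 11, 13 };
        int tree[SIZE];

        build(sorted, tree, 1);
        printf("%d %d %d\n",
               lower_bound(tree, 6),            /* 7  */
               lower_bound(tree, 13),           /* 13 */
               lower_bound(tree, 14));          /* -1 */
        return 0;
}

The real search compares against the small tree[] entries rather than the keys themselves except when a node's float is unusable, which is the point of the compressed layout.
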
1315 size_t j; in bch_btree_keys_stats() local
1323 for (j = 1; j < t->size; j++) in bch_btree_keys_stats()
1324 if (t->tree[j].exponent == 127) in bch_btree_keys_stats()