Lines Matching refs:c
251 struct cache_set *c; member
683 #define btree_bytes(c) ((c)->btree_pages * PAGE_SIZE) argument
684 #define btree_blocks(b) \
685 ((unsigned) (KEY_SIZE(&b->key) >> (b)->c->block_bits))
687 #define btree_default_blocks(c) \ argument
688 ((unsigned) ((PAGE_SECTORS * (c)->btree_pages) >> (c)->block_bits))
690 #define bucket_pages(c) ((c)->sb.bucket_size / PAGE_SECTORS) argument
691 #define bucket_bytes(c) ((c)->sb.bucket_size << 9) argument
692 #define block_bytes(c) ((c)->sb.block_size << 9) argument
694 #define prios_per_bucket(c) \ argument
695 ((bucket_bytes(c) - sizeof(struct prio_set)) / \
696 sizeof(struct bucket_disk))
697 #define prio_buckets(c) \ argument
698 DIV_ROUND_UP((size_t) (c)->sb.nbuckets, prios_per_bucket(c))
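The macros above derive the cache set's geometry from superblock fields: sizes are stored in 512-byte sectors, so shifting left by 9 converts to bytes, and prios_per_bucket()/prio_buckets() size the buckets that hold per-bucket priority/generation data. A minimal userspace sketch of the same arithmetic, with stand-in struct definitions and example superblock values (not the kernel's):

#include <stdio.h>
#include <stdint.h>

#define PAGE_SECTORS		8	/* 4096-byte page / 512-byte sectors */
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

struct prio_set_hdr { uint64_t csum, magic, seq; };	/* stand-in header */
struct bucket_disk  { uint16_t prio; uint8_t gen; };	/* stand-in entry */

struct sb { uint64_t nbuckets; uint16_t bucket_size, block_size; };

int main(void)
{
	struct sb sb = { .nbuckets = 65536, .bucket_size = 1024, .block_size = 8 };

	size_t bucket_pages = sb.bucket_size / PAGE_SECTORS;	/* bucket_pages() */
	size_t bucket_bytes = (size_t) sb.bucket_size << 9;	/* bucket_bytes(): sectors -> bytes */
	size_t block_bytes  = (size_t) sb.block_size << 9;	/* block_bytes() */

	/* prios_per_bucket(): prio/gen entries that fit in one bucket after
	 * the header; prio_buckets(): buckets needed to cover all nbuckets */
	size_t prios_per_bucket = (bucket_bytes - sizeof(struct prio_set_hdr)) /
				  sizeof(struct bucket_disk);
	size_t prio_buckets = DIV_ROUND_UP((size_t) sb.nbuckets, prios_per_bucket);

	printf("%zu pages/bucket, %zu B/bucket, %zu B/block\n",
	       bucket_pages, bucket_bytes, block_bytes);
	printf("%zu prios/bucket, %zu prio bucket(s)\n",
	       prios_per_bucket, prio_buckets);
	return 0;
}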
700 static inline size_t sector_to_bucket(struct cache_set *c, sector_t s) in sector_to_bucket() argument
702 return s >> c->bucket_bits; in sector_to_bucket()
705 static inline sector_t bucket_to_sector(struct cache_set *c, size_t b) in bucket_to_sector() argument
707 return ((sector_t) b) << c->bucket_bits; in bucket_to_sector()
710 static inline sector_t bucket_remainder(struct cache_set *c, sector_t s) in bucket_remainder() argument
712 return s & (c->sb.bucket_size - 1); in bucket_remainder()
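Because bucket_size is a power of two and bucket_bits is its log2, sector_to_bucket() and bucket_to_sector() are inverse shifts, and bucket_remainder() masks off the offset within a bucket. A standalone sketch of the round trip, assuming bucket_bits = 10 (a 1024-sector bucket):

#include <assert.h>
#include <stdint.h>
#include <stddef.h>

typedef uint64_t sector_t;

int main(void)
{
	const unsigned bucket_bits = 10;		/* log2(bucket_size) */
	const sector_t bucket_size = 1 << bucket_bits;	/* 1024 sectors */

	sector_t s = 123456789;
	size_t   b    = s >> bucket_bits;		/* sector_to_bucket() */
	sector_t base = (sector_t) b << bucket_bits;	/* bucket_to_sector() */
	sector_t rem  = s & (bucket_size - 1);		/* bucket_remainder() */

	/* the bucket's base sector plus the in-bucket remainder recovers s */
	assert(base + rem == s);
	return 0;
}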
715 static inline struct cache *PTR_CACHE(struct cache_set *c, in PTR_CACHE() argument
716 const struct bkey *k,
717 unsigned ptr)
719 return c->cache[PTR_DEV(k, ptr)]; in PTR_CACHE()
722 static inline size_t PTR_BUCKET_NR(struct cache_set *c, in PTR_BUCKET_NR() argument
723 const struct bkey *k,
724 unsigned ptr)
726 return sector_to_bucket(c, PTR_OFFSET(k, ptr)); in PTR_BUCKET_NR()
729 static inline struct bucket *PTR_BUCKET(struct cache_set *c, in PTR_BUCKET() argument
730 const struct bkey *k,
731 unsigned ptr)
733 return PTR_CACHE(c, k, ptr)->buckets + PTR_BUCKET_NR(c, k, ptr); in PTR_BUCKET()
742 static inline uint8_t ptr_stale(struct cache_set *c, const struct bkey *k, in ptr_stale() argument
743 unsigned i)
745 return gen_after(PTR_BUCKET(c, k, i)->gen, PTR_GEN(k, i)); in ptr_stale()
748 static inline bool ptr_available(struct cache_set *c, const struct bkey *k, in ptr_available() argument
749 unsigned i)
751 return (PTR_DEV(k, i) < MAX_CACHES_PER_SET) && PTR_CACHE(c, k, i); in ptr_available()
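PTR_CACHE(), PTR_BUCKET_NR() and PTR_BUCKET() chase one pointer of a bkey down to its in-memory bucket: the device index picks the struct cache, the sector offset picks the bucket, and ptr_stale() then compares the bucket's current generation with the one recorded in the key. A simplified userspace model of that chain; struct ptr and its plain fields stand in for the packed bkey and its PTR_DEV()/PTR_OFFSET()/PTR_GEN() extractors:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define MAX_CACHES_PER_SET	8
#define BUCKET_BITS		10	/* log2(bucket_size), example value */

struct bucket	 { uint8_t gen; };
struct cache	 { struct bucket buckets[16]; };
struct cache_set { struct cache *cache[MAX_CACHES_PER_SET]; };

/* one decoded pointer; the kernel packs dev/offset/gen into the bkey */
struct ptr { unsigned dev; uint64_t offset; uint8_t gen; };

static struct bucket *ptr_bucket(struct cache_set *c, const struct ptr *p)
{
	struct cache *ca = c->cache[p->dev];	/* PTR_CACHE() */
	size_t nr = p->offset >> BUCKET_BITS;	/* PTR_BUCKET_NR() */
	return ca->buckets + nr;		/* PTR_BUCKET() */
}

/* gen_after(): generation difference that tolerates 8-bit wraparound */
static uint8_t gen_after(uint8_t a, uint8_t b)
{
	uint8_t r = a - b;
	return r > 128 ? 0 : r;
}

int main(void)
{
	static struct cache ca;
	struct cache_set c = { .cache = { &ca } };
	struct ptr p = { .dev = 0, .offset = 3 << BUCKET_BITS, .gen = 5 };

	ca.buckets[3].gen = 7;	/* bucket reused twice since the key was written */

	/* ptr_stale(): nonzero means the pointed-to data was invalidated */
	printf("stale by %u generation(s)\n",
	       (unsigned) gen_after(ptr_bucket(&c, &p)->gen, p.gen));
	return 0;
}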
767 #define btree_bug(b, ...) \
769 if (bch_cache_set_error((b)->c, __VA_ARGS__)) \
773 #define cache_bug(c, ...) \ argument
775 if (bch_cache_set_error(c, __VA_ARGS__)) \
785 #define cache_bug_on(cond, c, ...) \ argument
788 cache_bug(c, __VA_ARGS__); \
791 #define cache_set_err_on(cond, c, ...) \ argument
794 bch_cache_set_error(c, __VA_ARGS__); \
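These macros route every failed consistency check through bch_cache_set_error(), which logs the failure and shuts the cache set down. A sketch of the pattern with stub functions, the printf-style __VA_ARGS__ reduced to a single message string:

#include <stdio.h>
#include <stdbool.h>

struct cache_set { const char *name; };

/* stand-in: the kernel's bch_cache_set_error() also unregisters the set */
static bool bch_cache_set_error_stub(struct cache_set *c, const char *msg)
{
	fprintf(stderr, "bcache: error on %s: %s, disabling caching\n",
		c->name, msg);
	return true;
}

static void dump_stack_stub(void)
{
	fprintf(stderr, "(stack trace would be printed here)\n");
}

/* do { ... } while (0) makes a multi-statement macro act as a single
 * statement, so it nests safely inside an unbraced if/else */
#define cache_bug(c, msg)						\
do {									\
	if (bch_cache_set_error_stub(c, msg))				\
		dump_stack_stub();					\
} while (0)

#define cache_bug_on(cond, c, msg)					\
do {									\
	if (cond)							\
		cache_bug(c, msg);					\
} while (0)

int main(void)
{
	struct cache_set c = { .name = "set0" };
	int nkeys = -1;

	cache_bug_on(nkeys < 0, &c, "negative key count");
	return 0;
}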
841 static inline void wake_up_allocators(struct cache_set *c) in wake_up_allocators() argument
846 for_each_cache(ca, c, i) in wake_up_allocators()
847 wake_up_process(ca->alloc_thread);
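wake_up_allocators() walks every cache device in the set and wakes its allocator thread. A userspace sketch of the iteration pattern; the for_each_cache() macro here is a simplified stand-in for the kernel's, and wake_stub() replaces wake_up_process(ca->alloc_thread):

#include <stdio.h>
#include <stddef.h>

#define MAX_CACHES_PER_SET	8

struct cache	 { int id; };
struct cache_set { struct cache *cache[MAX_CACHES_PER_SET]; };

/* simplified stand-in: visit each non-NULL cache slot in the set */
#define for_each_cache(ca, c, i)					\
	for ((i) = 0; (i) < MAX_CACHES_PER_SET; (i)++)			\
		if (((ca) = (c)->cache[(i)]) != NULL)

static void wake_stub(struct cache *ca)
{
	printf("waking allocator for cache %d\n", ca->id);
}

static void wake_up_allocators(struct cache_set *c)
{
	struct cache *ca;
	unsigned i;

	for_each_cache(ca, c, i)
		wake_stub(ca);
}

int main(void)
{
	struct cache a = { .id = 0 }, b = { .id = 1 };
	struct cache_set c = { .cache = { &a, &b } };

	wake_up_allocators(&c);
	return 0;
}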
904 int bch_flash_dev_create(struct cache_set *c, uint64_t size);