Lines matching references to the identifier k in the bcache request code:
39 static void bio_csum(struct bio *bio, struct bkey *k) in bio_csum() argument
51 k->ptr[KEY_PTRS(k)] = csum & (~0ULL >> 1); in bio_csum()
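bio_csum() computes a checksum over the bio's data and, per line 51, parks it in the pointer slot just past KEY_PTRS(k) with the top bit cleared. Below is a minimal userspace sketch of that storage convention only; toy_bkey and toy_csum_update are made-up stand-ins (bcache walks the bio's segments with its own 64-bit CRC, which is not reproduced here).

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define TOY_MAX_PTRS 3

struct toy_bkey {                       /* made-up stand-in for struct bkey */
	unsigned nr_ptrs;               /* plays the role of KEY_PTRS(k) */
	uint64_t ptr[TOY_MAX_PTRS + 1]; /* one spare slot after the last pointer */
};

/* stand-in for a rolling 64-bit checksum (not bcache's real CRC64) */
static uint64_t toy_csum_update(uint64_t csum, const void *data, size_t len)
{
	const unsigned char *p = data;

	while (len--)
		csum = (csum << 5) + csum + *p++;
	return csum;
}

/* mirrors line 51: store the checksum after the pointers, top bit cleared */
static void toy_store_csum(struct toy_bkey *k, uint64_t csum)
{
	k->ptr[k->nr_ptrs] = csum & (~0ULL >> 1);
}

int main(void)
{
	struct toy_bkey k = { .nr_ptrs = 1 };
	const char payload[] = "cached block payload";

	toy_store_csum(&k, toy_csum_update(0, payload, sizeof(payload)));
	printf("stored csum: %#llx\n", (unsigned long long)k.ptr[k.nr_ptrs]);
	return 0;
}
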
215 struct bkey *k; in bch_data_insert_start() local
226 k = op->insert_keys.top; in bch_data_insert_start()
227 bkey_init(k); in bch_data_insert_start()
228 SET_KEY_INODE(k, op->inode); in bch_data_insert_start()
229 SET_KEY_OFFSET(k, bio->bi_iter.bi_sector); in bch_data_insert_start()
231 if (!bch_alloc_sectors(op->c, k, bio_sectors(bio), in bch_data_insert_start()
236 n = bio_next_split(bio, KEY_SIZE(k), GFP_NOIO, split); in bch_data_insert_start()
242 SET_KEY_DIRTY(k, true); in bch_data_insert_start()
244 for (i = 0; i < KEY_PTRS(k); i++) in bch_data_insert_start()
245 SET_GC_MARK(PTR_BUCKET(op->c, k, i), in bch_data_insert_start()
249 SET_KEY_CSUM(k, op->csum); in bch_data_insert_start()
250 if (KEY_CSUM(k)) in bch_data_insert_start()
251 bio_csum(n, k); in bch_data_insert_start()
253 trace_bcache_cache_insert(k); in bch_data_insert_start()
257 bch_submit_bbio(n, op->c, k, 0); in bch_data_insert_start()
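Read in order, lines 226-257 sketch one iteration of the insert loop: initialise a key at the bio's current sector, let the allocator decide how many sectors it can take, split the bio to KEY_SIZE(k), mark dirty and checksum state, then submit. The fragment below is a simplified userspace model of the key setup only; toy_key, the 'granted' parameter standing in for bch_alloc_sectors(), and the writeback guard around the dirty marking are assumptions, not the kernel structures.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct toy_key {                 /* simplified stand-in for struct bkey */
	uint64_t inode;
	uint64_t offset;         /* end sector of the extent */
	unsigned size;           /* sectors covered, as KEY_SIZE() */
	bool dirty;              /* as KEY_DIRTY(): not yet on the backing device */
	bool csum;               /* as KEY_CSUM(): checksum stored after the pointers */
};

/*
 * Mirror the ordering of lines 226-251: start the key at the bio's sector,
 * take however many sectors the allocator grants (possibly fewer than asked),
 * then record dirty and checksum state before the key is submitted.
 */
static void toy_build_insert_key(struct toy_key *k, uint64_t inode,
				 uint64_t sector, unsigned want,
				 unsigned granted, bool writeback, bool csum)
{
	*k = (struct toy_key){ .inode = inode, .offset = sector };

	k->size = granted < want ? granted : want;
	k->offset += k->size;            /* extent keys record their end sector */

	if (writeback)
		k->dirty = true;         /* lines 242-245: dirty key, buckets GC-marked */

	k->csum = csum;                  /* lines 249-251 */
}

int main(void)
{
	struct toy_key k;

	toy_build_insert_key(&k, 1, 2048, 64, 32, true, false);
	printf("key: inode %llu, sectors [%llu, %llu), dirty=%d\n",
	       (unsigned long long)k.inode,
	       (unsigned long long)(k.offset - k.size),
	       (unsigned long long)k.offset, k.dirty);
	return 0;
}
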
366 static struct hlist_head *iohash(struct cached_dev *dc, uint64_t k) in iohash() argument
368 return &dc->io_hash[hash_64(k, RECENT_IO_BITS)]; in iohash()
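Line 368 is shown in full: recently seen I/O positions live in a small per-device hash table indexed by hash_64() of a 64-bit value. A self-contained approximation follows; the toy_* names and the table size are chosen here for illustration, and toy_hash_64 only imitates the kernel's multiplicative hash that keeps the top bits.

#include <stdint.h>
#include <stdio.h>

#define TOY_RECENT_IO_BITS 6                       /* stand-in for RECENT_IO_BITS */
#define TOY_RECENT_IO (1 << TOY_RECENT_IO_BITS)

struct toy_hlist_head { void *first; };            /* placeholder list head */

struct toy_cached_dev {
	struct toy_hlist_head io_hash[TOY_RECENT_IO];
};

/* multiplicative hash keeping the top 'bits' bits, imitating hash_64() */
static uint64_t toy_hash_64(uint64_t val, unsigned bits)
{
	return (val * 0x61C8864680B583EBull) >> (64 - bits);
}

/* mirrors line 368: map a 64-bit value to its bucket in the recent-I/O table */
static struct toy_hlist_head *toy_iohash(struct toy_cached_dev *dc, uint64_t k)
{
	return &dc->io_hash[toy_hash_64(k, TOY_RECENT_IO_BITS)];
}

int main(void)
{
	static struct toy_cached_dev dc;
	uint64_t sector = 123456;

	printf("sector %llu -> bucket %td\n", (unsigned long long)sector,
	       toy_iohash(&dc, sector) - dc.io_hash);
	return 0;
}
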
508 static int cache_lookup_fn(struct btree_op *op, struct btree *b, struct bkey *k) in cache_lookup_fn() argument
515 if (bkey_cmp(k, &KEY(s->iop.inode, bio->bi_iter.bi_sector, 0)) <= 0) in cache_lookup_fn()
518 if (KEY_INODE(k) != s->iop.inode || in cache_lookup_fn()
519 KEY_START(k) > bio->bi_iter.bi_sector) { in cache_lookup_fn()
521 unsigned sectors = KEY_INODE(k) == s->iop.inode in cache_lookup_fn()
523 KEY_START(k) - bio->bi_iter.bi_sector) in cache_lookup_fn()
534 if (!KEY_SIZE(k)) in cache_lookup_fn()
540 PTR_BUCKET(b->c, k, ptr)->prio = INITIAL_PRIO; in cache_lookup_fn()
542 if (KEY_DIRTY(k)) in cache_lookup_fn()
546 KEY_OFFSET(k) - bio->bi_iter.bi_sector), in cache_lookup_fn()
550 bch_bkey_copy_single_ptr(bio_key, k, ptr); in cache_lookup_fn()
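Lines 508-550 are the per-key callback of the cache lookup: keys that sort at or before the bio's current position (or have zero size) are skipped, a gap in front of the next key is handled as a cache miss for that many sectors, and an overlapping key is served from the cache for min(remaining, KEY_OFFSET(k) - sector) sectors, after bumping the bucket's priority and copying a single pointer into bio_key. The clamping arithmetic in a standalone toy form; toy_extent and the hit flag are illustrative, not the kernel types.

#include <stdint.h>
#include <stdio.h>

struct toy_extent {              /* stand-in for a bkey extent */
	uint64_t inode;
	uint64_t start;          /* as KEY_START(k): first sector covered */
	uint64_t end;            /* as KEY_OFFSET(k): one past the last sector */
};

/*
 * Given a bio at 'sector' with 'remaining' sectors and the next key at or
 * after that position, return how many sectors the next split covers and
 * whether that span is served from the cache (cf. lines 518-546).
 */
static unsigned toy_split_sectors(const struct toy_extent *k, uint64_t inode,
				  uint64_t sector, unsigned remaining, int *hit)
{
	uint64_t n;

	if (k->inode != inode || k->start > sector) {
		/* hole in front of the key: miss up to where the key begins */
		n = (k->inode == inode) ? k->start - sector : remaining;
		*hit = 0;
	} else {
		/* overlap: hit up to the end of the key */
		n = k->end - sector;
		*hit = 1;
	}
	return n < remaining ? (unsigned)n : remaining;
}

int main(void)
{
	struct toy_extent k = { .inode = 1, .start = 8, .end = 24 };
	int hit;
	unsigned n = toy_split_sectors(&k, 1, 0, 32, &hit);

	printf("first split: %u sectors (%s)\n", n, hit ? "cache hit" : "cache miss");
	return 0;
}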