Lines Matching refs:bt
22 static bool bt_has_free_tags(struct blk_mq_bitmap_tags *bt) in bt_has_free_tags() argument
26 for (i = 0; i < bt->map_nr; i++) { in bt_has_free_tags()
27 struct blk_align_bitmap *bm = &bt->map[i]; in bt_has_free_tags()
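The three hits above are the whole of bt_has_free_tags(): it walks every per-word map looking for a clear bit. A minimal sketch of how that loop can be completed, assuming each blk_align_bitmap carries its own word and depth and that find_first_zero_bit() does the scan (the body beyond the lines shown here is an assumption, not the source):

	static bool bt_has_free_tags(struct blk_mq_bitmap_tags *bt)
	{
		unsigned int i;

		for (i = 0; i < bt->map_nr; i++) {
			struct blk_align_bitmap *bm = &bt->map[i];

			/* any clear bit below bm->depth is a free tag */
			if (find_first_zero_bit(&bm->word, bm->depth) < bm->depth)
				return true;
		}

		return false;
	}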
75 struct blk_mq_bitmap_tags *bt; in blk_mq_tag_wakeup_all() local
78 bt = &tags->bitmap_tags; in blk_mq_tag_wakeup_all()
79 wake_index = atomic_read(&bt->wake_index); in blk_mq_tag_wakeup_all()
81 struct bt_wait_state *bs = &bt->bs[wake_index]; in blk_mq_tag_wakeup_all()
90 bt = &tags->breserved_tags; in blk_mq_tag_wakeup_all()
91 if (waitqueue_active(&bt->bs[0].wait)) in blk_mq_tag_wakeup_all()
92 wake_up(&bt->bs[0].wait); in blk_mq_tag_wakeup_all()
117 struct blk_mq_bitmap_tags *bt) in hctx_may_queue() argument
129 if (bt->depth == 1) in hctx_may_queue()
139 depth = max((bt->depth + users - 1) / users, 4U); in hctx_may_queue()
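The max() on line 139 is the fair-share cap for shared tag maps: each active user of the tag set is allowed roughly ceil(bt->depth / users) tags, but never fewer than 4. For example, with bt->depth == 128 and users == 3 the expression evaluates to max((128 + 2) / 3, 4U) == 43, while the bt->depth == 1 test on line 129 bails out early when there is nothing worth dividing.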
187 static int __bt_get(struct blk_mq_hw_ctx *hctx, struct blk_mq_bitmap_tags *bt, in __bt_get() argument
193 if (!hctx_may_queue(hctx, bt)) in __bt_get()
197 index = TAG_TO_INDEX(bt, last_tag); in __bt_get()
199 for (i = 0; i < bt->map_nr; i++) { in __bt_get()
200 tag = __bt_get_word(&bt->map[index], TAG_TO_BIT(bt, last_tag), in __bt_get()
203 tag += (index << bt->bits_per_word); in __bt_get()
212 last_tag = (index << bt->bits_per_word); in __bt_get()
214 if (index >= bt->map_nr) { in __bt_get()
230 if (last_tag >= bt->depth - 1) in __bt_get()
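__bt_get() turns the cached last_tag into a word index plus a bit offset (lines 197-200) and converts a per-word hit back into a global tag number on line 203. The two helpers it uses are not part of this match list; definitions consistent with the shifts above would look like the following (names taken from the lines, bodies assumed):

	/* which map[] word a tag lives in, and which bit inside that word */
	#define TAG_TO_INDEX(bt, tag)	((tag) >> (bt)->bits_per_word)
	#define TAG_TO_BIT(bt, tag)	((tag) & ((1 << (bt)->bits_per_word) - 1))

With that split, line 212's last_tag = (index << bt->bits_per_word) restarts the search at the first tag of the next word, and line 230 is the wrap-around check at the end of the tag space.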
239 static struct bt_wait_state *bt_wait_ptr(struct blk_mq_bitmap_tags *bt, in bt_wait_ptr() argument
246 return &bt->bs[0]; in bt_wait_ptr()
249 bs = &bt->bs[wait_index]; in bt_wait_ptr()
255 struct blk_mq_bitmap_tags *bt, in bt_get() argument
263 tag = __bt_get(hctx, bt, last_tag, tags); in bt_get()
270 bs = bt_wait_ptr(bt, hctx); in bt_get()
274 tag = __bt_get(hctx, bt, last_tag, tags); in bt_get()
291 tag = __bt_get(hctx, bt, last_tag, tags); in bt_get()
303 bt = &data->hctx->tags->breserved_tags; in bt_get()
307 bt = &hctx->tags->bitmap_tags; in bt_get()
310 bs = bt_wait_ptr(bt, hctx); in bt_get()
354 static struct bt_wait_state *bt_wake_ptr(struct blk_mq_bitmap_tags *bt) in bt_wake_ptr() argument
358 wake_index = atomic_read(&bt->wake_index); in bt_wake_ptr()
360 struct bt_wait_state *bs = &bt->bs[wake_index]; in bt_wake_ptr()
363 int o = atomic_read(&bt->wake_index); in bt_wake_ptr()
365 atomic_cmpxchg(&bt->wake_index, o, wake_index); in bt_wake_ptr()
376 static void bt_clear_tag(struct blk_mq_bitmap_tags *bt, unsigned int tag) in bt_clear_tag() argument
378 const int index = TAG_TO_INDEX(bt, tag); in bt_clear_tag()
382 clear_bit(TAG_TO_BIT(bt, tag), &bt->map[index].word); in bt_clear_tag()
387 bs = bt_wake_ptr(bt); in bt_clear_tag()
395 atomic_add(bt->wake_cnt, &bs->wait_cnt); in bt_clear_tag()
396 bt_index_atomic_inc(&bt->wake_index); in bt_clear_tag()
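Freeing a tag is the mirror of the allocation path above: bt_clear_tag() clears the bit (line 382), then charges the free against one of the wait queues and only wakes sleepers once a whole batch of bt->wake_cnt frees has accumulated (lines 395-396). A sketch of that batching, picking up right after the clear_bit() and assuming bs->wait_cnt simply counts down the remaining frees in the batch (the guard logic in the real function may be more involved):

	bs = bt_wake_ptr(bt);		/* pick an active wait queue, if any */
	if (!bs)
		return;

	if (atomic_dec_return(&bs->wait_cnt) == 0) {
		/* batch exhausted: refill it, rotate to the next queue, wake */
		atomic_add(bt->wake_cnt, &bs->wait_cnt);
		bt_index_atomic_inc(&bt->wake_index);
		wake_up(&bs->wait);
	}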
420 struct blk_mq_bitmap_tags *bt, unsigned int off, in bt_for_each() argument
426 for (i = 0; i < bt->map_nr; i++) { in bt_for_each()
427 struct blk_align_bitmap *bm = &bt->map[i]; in bt_for_each()
437 off += (1 << bt->bits_per_word); in bt_for_each()
453 static unsigned int bt_unused_tags(struct blk_mq_bitmap_tags *bt) in bt_unused_tags() argument
457 for (i = 0, used = 0; i < bt->map_nr; i++) { in bt_unused_tags()
458 struct blk_align_bitmap *bm = &bt->map[i]; in bt_unused_tags()
463 return bt->depth - used; in bt_unused_tags()
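bt_unused_tags() (lines 453-463) reports the free-tag count as bt->depth minus however many bits are currently set across the map. The elided middle of that loop plausibly accumulates a per-word population count, e.g.:

	used += bitmap_weight(&bm->word, bm->depth);	/* assumed; counts used tags in this word */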
466 static void bt_update_count(struct blk_mq_bitmap_tags *bt, in bt_update_count() argument
469 unsigned int tags_per_word = 1U << bt->bits_per_word; in bt_update_count()
475 for (i = 0; i < bt->map_nr; i++) { in bt_update_count()
476 bt->map[i].depth = min(map_depth, tags_per_word); in bt_update_count()
477 map_depth -= bt->map[i].depth; in bt_update_count()
481 bt->wake_cnt = BT_WAIT_BATCH; in bt_update_count()
482 if (bt->wake_cnt > depth / BT_WAIT_QUEUES) in bt_update_count()
483 bt->wake_cnt = max(1U, depth / BT_WAIT_QUEUES); in bt_update_count()
485 bt->depth = depth; in bt_update_count()
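bt_update_count() first spreads the new depth across the map words (lines 475-477) and then sizes the wakeup batch (lines 481-483). Assuming 64 tags per word, a depth of 70 leaves map[0].depth == 64 and map[1].depth == 6; and if BT_WAIT_QUEUES and BT_WAIT_BATCH are both 8, a depth of 32 clamps wake_cnt down to max(1U, 32 / 8) == 4, so sleepers are woken after every 4 frees rather than every 8.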
488 static int bt_alloc(struct blk_mq_bitmap_tags *bt, unsigned int depth, in bt_alloc() argument
493 bt->bits_per_word = ilog2(BITS_PER_LONG); in bt_alloc()
502 tags_per_word = (1 << bt->bits_per_word); in bt_alloc()
512 bt->bits_per_word--; in bt_alloc()
513 tags_per_word = (1 << bt->bits_per_word); in bt_alloc()
518 bt->map = kzalloc_node(nr * sizeof(struct blk_align_bitmap), in bt_alloc()
520 if (!bt->map) in bt_alloc()
523 bt->map_nr = nr; in bt_alloc()
526 bt->bs = kzalloc(BT_WAIT_QUEUES * sizeof(*bt->bs), GFP_KERNEL); in bt_alloc()
527 if (!bt->bs) { in bt_alloc()
528 kfree(bt->map); in bt_alloc()
529 bt->map = NULL; in bt_alloc()
533 bt_update_count(bt, depth); in bt_alloc()
536 init_waitqueue_head(&bt->bs[i].wait); in bt_alloc()
537 atomic_set(&bt->bs[i].wait_cnt, bt->wake_cnt); in bt_alloc()
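bt_alloc() starts with one map word per BITS_PER_LONG tags (line 493) and shrinks bits_per_word for small tag spaces (lines 512-513), presumably so that a handful of tags is spread over several blk_align_bitmap words, and hence several cachelines, instead of all contending on one; on a 64-bit build, for instance, a depth of 16 shrunk to bits_per_word == 2 would occupy four words of four tags each. It then allocates map[] and the BT_WAIT_QUEUES wait-queue array (lines 518-526), backing out the map allocation if the second kzalloc() fails (lines 528-529).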
543 static void bt_free(struct blk_mq_bitmap_tags *bt) in bt_free() argument
545 kfree(bt->map); in bt_free()
546 kfree(bt->bs); in bt_free()
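Taken together, the fields referenced throughout this listing (map, map_nr, bits_per_word, depth, wake_cnt, wake_index, bs, plus the per-word word/depth pair and the per-queue wait/wait_cnt pair) outline the allocator's core structures. A reconstruction along those lines is shown below; field order, the cacheline alignment annotations, and anything not referenced above are assumptions, not the authoritative definitions:

	struct blk_align_bitmap {
		unsigned long word;		/* one word of the tag bitmap */
		unsigned long depth;		/* number of valid bits in this word */
	} ____cacheline_aligned_in_smp;

	struct bt_wait_state {
		atomic_t wait_cnt;		/* frees left before this queue is woken */
		wait_queue_head_t wait;
	} ____cacheline_aligned_in_smp;

	struct blk_mq_bitmap_tags {
		unsigned int depth;		/* total number of tags */
		unsigned int wake_cnt;		/* wakeup batch size */
		unsigned int bits_per_word;	/* log2(tags per map word) */

		unsigned int map_nr;		/* entries in map[] */
		struct blk_align_bitmap *map;

		atomic_t wake_index;		/* next wait queue to wake */
		struct bt_wait_state *bs;	/* BT_WAIT_QUEUES entries */
	};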