Lines Matching refs:bt
(cross-reference hits for the identifier bt in the blk-mq bitmap tag allocator; each entry gives the source line number, the matching line, and the enclosing function, annotated "argument" or "local" according to how bt is declared there)

22 static bool bt_has_free_tags(struct blk_mq_bitmap_tags *bt)  in bt_has_free_tags()  argument
26 for (i = 0; i < bt->map_nr; i++) { in bt_has_free_tags()
27 struct blk_align_bitmap *bm = &bt->map[i]; in bt_has_free_tags()
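The three hits above are nearly the whole of bt_has_free_tags(); the sketch below fills in the elided lines as an assumption, treating find_first_zero_bit() on each per-word depth as the free-tag check.

static bool bt_has_free_tags(struct blk_mq_bitmap_tags *bt)
{
        unsigned int i;

        for (i = 0; i < bt->map_nr; i++) {
                struct blk_align_bitmap *bm = &bt->map[i];

                /* a clear bit below this word's depth means a free tag */
                if (find_first_zero_bit(&bm->word, bm->depth) < bm->depth)
                        return true;
        }

        return false;
}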
75 struct blk_mq_bitmap_tags *bt; in blk_mq_tag_wakeup_all() local
82 bt = &tags->bitmap_tags; in blk_mq_tag_wakeup_all()
83 wake_index = atomic_read(&bt->wake_index); in blk_mq_tag_wakeup_all()
85 struct bt_wait_state *bs = &bt->bs[wake_index]; in blk_mq_tag_wakeup_all()
94 bt = &tags->breserved_tags; in blk_mq_tag_wakeup_all()
95 if (waitqueue_active(&bt->bs[0].wait)) in blk_mq_tag_wakeup_all()
96 wake_up(&bt->bs[0].wait); in blk_mq_tag_wakeup_all()
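Taken together, the blk_mq_tag_wakeup_all() hits outline the wakeup path: walk the wait queues of the regular bitmap starting at wake_index, then wake the single reserved-tag queue. A sketch, assuming BT_WAIT_QUEUES and a bt_index_inc() wrap-around helper that are not visible in this listing, and ignoring any memory barrier or extra parameters the real function may have:

void blk_mq_tag_wakeup_all(struct blk_mq_tags *tags)
{
        struct blk_mq_bitmap_tags *bt;
        int i, wake_index;

        bt = &tags->bitmap_tags;
        wake_index = atomic_read(&bt->wake_index);
        for (i = 0; i < BT_WAIT_QUEUES; i++) {
                struct bt_wait_state *bs = &bt->bs[wake_index];

                if (waitqueue_active(&bs->wait))
                        wake_up(&bs->wait);

                /* assumed helper: advance modulo BT_WAIT_QUEUES */
                wake_index = bt_index_inc(wake_index);
        }

        /* reserved tags only ever use wait queue 0 */
        bt = &tags->breserved_tags;
        if (waitqueue_active(&bt->bs[0].wait))
                wake_up(&bt->bs[0].wait);
}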
121 struct blk_mq_bitmap_tags *bt) in hctx_may_queue() argument
133 if (bt->depth == 1) in hctx_may_queue()
143 depth = max((bt->depth + users - 1) / users, 4U); in hctx_may_queue()
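The hctx_may_queue() hits show the fairness check applied before any bit is scanned: when several queues share a tag map, each active queue is limited to roughly bt->depth / users tags, but never fewer than 4. A sketch; the shared-tag and active-queue tests around the two visible lines are assumptions:

static inline bool hctx_may_queue(struct blk_mq_hw_ctx *hctx,
                                  struct blk_mq_bitmap_tags *bt)
{
        unsigned int depth, users;

        /* assumed: only throttle queues that share a tag set */
        if (!hctx || !(hctx->flags & BLK_MQ_F_TAG_SHARED))
                return true;

        /* a single tag cannot be divided any further */
        if (bt->depth == 1)
                return true;

        users = atomic_read(&hctx->tags->active_queues);
        if (!users)
                return true;

        /* fair share per active queue, but always allow at least 4 tags */
        depth = max((bt->depth + users - 1) / users, 4U);
        return atomic_read(&hctx->nr_active) < depth;
}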
191 static int __bt_get(struct blk_mq_hw_ctx *hctx, struct blk_mq_bitmap_tags *bt, in __bt_get() argument
197 if (!hctx_may_queue(hctx, bt)) in __bt_get()
201 index = TAG_TO_INDEX(bt, last_tag); in __bt_get()
203 for (i = 0; i < bt->map_nr; i++) { in __bt_get()
204 tag = __bt_get_word(&bt->map[index], TAG_TO_BIT(bt, last_tag), in __bt_get()
207 tag += (index << bt->bits_per_word); in __bt_get()
216 last_tag = (index << bt->bits_per_word); in __bt_get()
218 if (index >= bt->map_nr) { in __bt_get()
234 if (last_tag >= bt->depth - 1) in __bt_get()
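The __bt_get() hits trace the core allocation loop: start at the word holding the cached last_tag, scan each word in turn with __bt_get_word(), and wrap both the word index and the starting bit when the end of the map is reached. A condensed sketch; the __bt_get_word() internals, the round-robin flag carried in tags, and the exact condition guarding the cache update are assumptions or simplifications:

static int __bt_get(struct blk_mq_hw_ctx *hctx, struct blk_mq_bitmap_tags *bt,
                    unsigned int *tag_cache, struct blk_mq_tags *tags)
{
        unsigned int last_tag;
        int index, i, tag;

        if (!hctx_may_queue(hctx, bt))
                return -1;

        last_tag = *tag_cache;
        index = TAG_TO_INDEX(bt, last_tag);

        for (i = 0; i < bt->map_nr; i++) {
                /* scan one aligned word, starting at the cached bit */
                tag = __bt_get_word(&bt->map[index], TAG_TO_BIT(bt, last_tag),
                                    false);
                if (tag != -1) {
                        tag += (index << bt->bits_per_word);
                        goto done;
                }

                /* next word; its scan starts at bit 0 */
                index++;
                last_tag = (index << bt->bits_per_word);
                if (index >= bt->map_nr) {
                        index = 0;
                        last_tag = 0;
                }
        }

        return -1;

done:
        /* remember where to start next time, wrapping near the end */
        last_tag = tag + 1;
        if (last_tag >= bt->depth - 1)
                last_tag = 0;
        *tag_cache = last_tag;
        return tag;
}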
243 static struct bt_wait_state *bt_wait_ptr(struct blk_mq_bitmap_tags *bt, in bt_wait_ptr() argument
250 return &bt->bs[0]; in bt_wait_ptr()
253 bs = &bt->bs[wait_index]; in bt_wait_ptr()
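bt_wait_ptr() picks which wait queue a sleeping allocator should park on: the reserved map (no hctx) always uses queue 0, while regular allocations rotate through the queues via a per-hctx index. A sketch, assuming hctx->wait_index and the bt_index_atomic_inc() helper seen later in this listing:

static struct bt_wait_state *bt_wait_ptr(struct blk_mq_bitmap_tags *bt,
                                         struct blk_mq_hw_ctx *hctx)
{
        struct bt_wait_state *bs;
        int wait_index;

        if (!hctx)
                return &bt->bs[0];

        /* rotate through the wait queues to spread sleepers around */
        wait_index = atomic_read(&hctx->wait_index);
        bs = &bt->bs[wait_index];
        bt_index_atomic_inc(&hctx->wait_index);
        return bs;
}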
259 struct blk_mq_bitmap_tags *bt, in bt_get() argument
267 tag = __bt_get(hctx, bt, last_tag, tags); in bt_get()
274 bs = bt_wait_ptr(bt, hctx); in bt_get()
278 tag = __bt_get(hctx, bt, last_tag, tags); in bt_get()
295 tag = __bt_get(hctx, bt, last_tag, tags); in bt_get()
307 bt = &data->hctx->tags->breserved_tags; in bt_get()
311 bt = &hctx->tags->bitmap_tags; in bt_get()
314 bs = bt_wait_ptr(bt, hctx); in bt_get()
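The bt_get() hits give the skeleton of the sleeping allocation path: try __bt_get(), pick a wait queue, retry inside prepare_to_wait(), sleep, and after waking re-derive bt (reserved vs. regular map) because the task may have moved to another hctx. A heavily condensed sketch; the gfp check, the hardware-queue kick and its extra retry before sleeping, and the context remapping details are omitted or assumed:

static int bt_get(struct blk_mq_alloc_data *data, struct blk_mq_bitmap_tags *bt,
                  struct blk_mq_hw_ctx *hctx, unsigned int *last_tag,
                  struct blk_mq_tags *tags)
{
        struct bt_wait_state *bs;
        DEFINE_WAIT(wait);
        int tag;

        tag = __bt_get(hctx, bt, last_tag, tags);
        if (tag != -1)
                return tag;

        bs = bt_wait_ptr(bt, hctx);
        do {
                prepare_to_wait(&bs->wait, &wait, TASK_UNINTERRUPTIBLE);

                tag = __bt_get(hctx, bt, last_tag, tags);
                if (tag != -1)
                        break;

                io_schedule();

                /* we may wake up elsewhere: re-resolve hctx and bt */
                if (data->reserved) {
                        bt = &data->hctx->tags->breserved_tags;
                } else {
                        hctx = data->hctx;
                        bt = &hctx->tags->bitmap_tags;
                }
                finish_wait(&bs->wait, &wait);
                bs = bt_wait_ptr(bt, hctx);
        } while (1);

        finish_wait(&bs->wait, &wait);
        return tag;
}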
358 static struct bt_wait_state *bt_wake_ptr(struct blk_mq_bitmap_tags *bt) in bt_wake_ptr() argument
362 wake_index = atomic_read(&bt->wake_index); in bt_wake_ptr()
364 struct bt_wait_state *bs = &bt->bs[wake_index]; in bt_wake_ptr()
367 int o = atomic_read(&bt->wake_index); in bt_wake_ptr()
369 atomic_cmpxchg(&bt->wake_index, o, wake_index); in bt_wake_ptr()
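bt_wake_ptr() finds the first wait queue with sleepers, starting from wake_index, and opportunistically moves wake_index forward with a cmpxchg so later wakeups start near where this one left off. A sketch, again assuming BT_WAIT_QUEUES and bt_index_inc():

static struct bt_wait_state *bt_wake_ptr(struct blk_mq_bitmap_tags *bt)
{
        int i, wake_index;

        wake_index = atomic_read(&bt->wake_index);
        for (i = 0; i < BT_WAIT_QUEUES; i++) {
                struct bt_wait_state *bs = &bt->bs[wake_index];

                if (waitqueue_active(&bs->wait)) {
                        int o = atomic_read(&bt->wake_index);

                        /* nudge wake_index forward; losing the race is fine */
                        if (wake_index != o)
                                atomic_cmpxchg(&bt->wake_index, o, wake_index);
                        return bs;
                }

                wake_index = bt_index_inc(wake_index);
        }

        return NULL;
}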
380 static void bt_clear_tag(struct blk_mq_bitmap_tags *bt, unsigned int tag) in bt_clear_tag() argument
382 const int index = TAG_TO_INDEX(bt, tag); in bt_clear_tag()
386 clear_bit(TAG_TO_BIT(bt, tag), &bt->map[index].word); in bt_clear_tag()
391 bs = bt_wake_ptr(bt); in bt_clear_tag()
399 atomic_add(bt->wake_cnt, &bs->wait_cnt); in bt_clear_tag()
400 bt_index_atomic_inc(&bt->wake_index); in bt_clear_tag()
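bt_clear_tag() is the release side: clear the bit, then do a batched wakeup so sleepers are only woken once wake_cnt tags have been freed against the current wait queue. A sketch; the barrier after clear_bit() and the exact wait_cnt bookkeeping are assumptions:

static void bt_clear_tag(struct blk_mq_bitmap_tags *bt, unsigned int tag)
{
        const int index = TAG_TO_INDEX(bt, tag);
        struct bt_wait_state *bs;
        int wait_cnt;

        clear_bit(TAG_TO_BIT(bt, tag), &bt->map[index].word);

        /* make the freed bit visible before checking for waiters (assumed) */
        smp_mb();

        bs = bt_wake_ptr(bt);
        if (!bs)
                return;

        wait_cnt = atomic_dec_return(&bs->wait_cnt);
        if (wait_cnt == 0) {
                /* rearm the batch counter and move to the next wait queue */
                atomic_add(bt->wake_cnt, &bs->wait_cnt);
                bt_index_atomic_inc(&bt->wake_index);
                wake_up(&bs->wait);
        }
}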
424 struct blk_mq_bitmap_tags *bt, unsigned int off, in bt_for_each() argument
430 for (i = 0; i < bt->map_nr; i++) { in bt_for_each()
431 struct blk_align_bitmap *bm = &bt->map[i]; in bt_for_each()
441 off += (1 << bt->bits_per_word); in bt_for_each()
446 struct blk_mq_bitmap_tags *bt, unsigned int off, in bt_tags_for_each() argument
454 for (i = 0; i < bt->map_nr; i++) { in bt_tags_for_each()
455 struct blk_align_bitmap *bm = &bt->map[i]; in bt_tags_for_each()
464 off += (1 << bt->bits_per_word); in bt_tags_for_each()
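Both iterators share the same walk: for every word, visit each set bit below that word's depth, translate it to a tag as off + bit, then advance off by a full word. A sketch of that shape using bt_for_each(); the per-bit loop, the rqs[] lookup and the callback types are assumptions:

static void bt_for_each(struct blk_mq_hw_ctx *hctx,
                        struct blk_mq_bitmap_tags *bt, unsigned int off,
                        busy_iter_fn *fn, void *data, bool reserved)
{
        int bit, i;

        for (i = 0; i < bt->map_nr; i++) {
                struct blk_align_bitmap *bm = &bt->map[i];

                /* visit every allocated tag in this word */
                for (bit = find_first_bit(&bm->word, bm->depth);
                     bit < bm->depth;
                     bit = find_next_bit(&bm->word, bm->depth, bit + 1)) {
                        struct request *rq = hctx->tags->rqs[off + bit];

                        if (rq && rq->q == hctx->queue)
                                fn(hctx, rq, data, reserved);
                }

                off += (1 << bt->bits_per_word);
        }
}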
503 static unsigned int bt_unused_tags(struct blk_mq_bitmap_tags *bt) in bt_unused_tags() argument
507 for (i = 0, used = 0; i < bt->map_nr; i++) { in bt_unused_tags()
508 struct blk_align_bitmap *bm = &bt->map[i]; in bt_unused_tags()
513 return bt->depth - used; in bt_unused_tags()
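bt_unused_tags() simply counts set bits per word and subtracts from the total depth; the bitmap_weight() call on the elided line is an assumption:

static unsigned int bt_unused_tags(struct blk_mq_bitmap_tags *bt)
{
        unsigned int i, used;

        for (i = 0, used = 0; i < bt->map_nr; i++) {
                struct blk_align_bitmap *bm = &bt->map[i];

                /* count allocated tags in this word */
                used += bitmap_weight(&bm->word, bm->depth);
        }

        return bt->depth - used;
}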
516 static void bt_update_count(struct blk_mq_bitmap_tags *bt, in bt_update_count() argument
519 unsigned int tags_per_word = 1U << bt->bits_per_word; in bt_update_count()
525 for (i = 0; i < bt->map_nr; i++) { in bt_update_count()
526 bt->map[i].depth = min(map_depth, tags_per_word); in bt_update_count()
527 map_depth -= bt->map[i].depth; in bt_update_count()
531 bt->wake_cnt = BT_WAIT_BATCH; in bt_update_count()
532 if (bt->wake_cnt > depth / BT_WAIT_QUEUES) in bt_update_count()
533 bt->wake_cnt = max(1U, depth / BT_WAIT_QUEUES); in bt_update_count()
535 bt->depth = depth; in bt_update_count()
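As a worked example of bt_update_count() (assuming 64 tags per word, a four-word map, and the usual BT_WAIT_BATCH = 8 and BT_WAIT_QUEUES = 8, none of which is visible in this listing): bt_update_count(bt, 256) sets every map[i].depth to 64 and leaves wake_cnt at 8, since 256 / 8 = 32 exceeds the batch; shrinking with bt_update_count(bt, 16) leaves map[0].depth = 16 and the remaining words at 0, and clamps wake_cnt to max(1U, 16 / 8) = 2, so waiters are woken after every two freed tags instead of every eight.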
538 static int bt_alloc(struct blk_mq_bitmap_tags *bt, unsigned int depth, in bt_alloc() argument
543 bt->bits_per_word = ilog2(BITS_PER_LONG); in bt_alloc()
552 tags_per_word = (1 << bt->bits_per_word); in bt_alloc()
562 bt->bits_per_word--; in bt_alloc()
563 tags_per_word = (1 << bt->bits_per_word); in bt_alloc()
568 bt->map = kzalloc_node(nr * sizeof(struct blk_align_bitmap), in bt_alloc()
570 if (!bt->map) in bt_alloc()
573 bt->map_nr = nr; in bt_alloc()
576 bt->bs = kzalloc(BT_WAIT_QUEUES * sizeof(*bt->bs), GFP_KERNEL); in bt_alloc()
577 if (!bt->bs) { in bt_alloc()
578 kfree(bt->map); in bt_alloc()
579 bt->map = NULL; in bt_alloc()
583 bt_update_count(bt, depth); in bt_alloc()
586 init_waitqueue_head(&bt->bs[i].wait); in bt_alloc()
587 atomic_set(&bt->bs[i].wait_cnt, bt->wake_cnt); in bt_alloc()
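The bt_alloc() hits cover setup of the word size, the map allocation and the wait queues. The sketch below fills in the elided pieces as assumptions: the loop that shrinks bits_per_word so a small tag space still spans several cachelines, the ALIGN-based map sizing, and the error returns.

static int bt_alloc(struct blk_mq_bitmap_tags *bt, unsigned int depth,
                    int node, bool reserved)
{
        int i;

        bt->bits_per_word = ilog2(BITS_PER_LONG);

        if (depth) {
                unsigned int nr, tags_per_word;

                tags_per_word = (1 << bt->bits_per_word);

                /*
                 * Shrink the word size for small tag spaces so the tags
                 * spread over several cachelines (assumed heuristic).
                 */
                if (depth >= 4) {
                        while (tags_per_word * 4 > depth) {
                                bt->bits_per_word--;
                                tags_per_word = (1 << bt->bits_per_word);
                        }
                }

                nr = ALIGN(depth, tags_per_word) / tags_per_word;
                bt->map = kzalloc_node(nr * sizeof(struct blk_align_bitmap),
                                       GFP_KERNEL, node);
                if (!bt->map)
                        return -ENOMEM;

                bt->map_nr = nr;
        }

        bt->bs = kzalloc(BT_WAIT_QUEUES * sizeof(*bt->bs), GFP_KERNEL);
        if (!bt->bs) {
                kfree(bt->map);
                bt->map = NULL;
                return -ENOMEM;
        }

        bt_update_count(bt, depth);

        for (i = 0; i < BT_WAIT_QUEUES; i++) {
                init_waitqueue_head(&bt->bs[i].wait);
                atomic_set(&bt->bs[i].wait_cnt, bt->wake_cnt);
        }

        return 0;
}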
593 static void bt_free(struct blk_mq_bitmap_tags *bt) in bt_free() argument
595 kfree(bt->map); in bt_free()
596 kfree(bt->bs); in bt_free()