Lines Matching refs:icq (references to icq in block/blk-ioc.c). Each entry shows the file line number, the matched source line, and the enclosing function; "local" marks a local-variable declaration and "argument" a function parameter.

33 struct io_cq *icq = container_of(head, struct io_cq, __rcu_head); in icq_free_icq_rcu() local
35 kmem_cache_free(icq->__rcu_icq_cache, icq); in icq_free_icq_rcu()
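The two matches above are the entire RCU free callback. A minimal sketch of how it reads in context, assuming the standard rcu_head callback signature (only the container_of() and kmem_cache_free() lines come from the matches):

static void icq_free_icq_rcu(struct rcu_head *head)
{
	struct io_cq *icq = container_of(head, struct io_cq, __rcu_head);

	/* __rcu_icq_cache is recorded by ioc_destroy_icq() because the
	 * queue (and its elevator's cache pointer) may already be gone
	 * by the time this callback runs. */
	kmem_cache_free(icq->__rcu_icq_cache, icq);
}
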
39 static void ioc_exit_icq(struct io_cq *icq) in ioc_exit_icq() argument
41 struct elevator_type *et = icq->q->elevator->type; in ioc_exit_icq()
43 if (icq->flags & ICQ_EXITED) in ioc_exit_icq()
47 et->ops.elevator_exit_icq_fn(icq); in ioc_exit_icq()
49 icq->flags |= ICQ_EXITED; in ioc_exit_icq()
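A sketch of ioc_exit_icq() assembled around the matched lines; the early return is implied by the ICQ_EXITED test at line 43, while the NULL check on the optional elevator hook is an assumption:

/* Notify the elevator that an icq is going away, at most once.
 * Assumed to be called with both the ioc and the queue locked. */
static void ioc_exit_icq(struct io_cq *icq)
{
	struct elevator_type *et = icq->q->elevator->type;

	if (icq->flags & ICQ_EXITED)
		return;

	if (et->ops.elevator_exit_icq_fn)	/* hook is optional */
		et->ops.elevator_exit_icq_fn(icq);

	icq->flags |= ICQ_EXITED;
}
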
53 static void ioc_destroy_icq(struct io_cq *icq) in ioc_destroy_icq() argument
55 struct io_context *ioc = icq->ioc; in ioc_destroy_icq()
56 struct request_queue *q = icq->q; in ioc_destroy_icq()
62 radix_tree_delete(&ioc->icq_tree, icq->q->id); in ioc_destroy_icq()
63 hlist_del_init(&icq->ioc_node); in ioc_destroy_icq()
64 list_del_init(&icq->q_node); in ioc_destroy_icq()
71 if (rcu_access_pointer(ioc->icq_hint) == icq) in ioc_destroy_icq()
74 ioc_exit_icq(icq); in ioc_destroy_icq()
80 icq->__rcu_icq_cache = et->icq_cache; in ioc_destroy_icq()
81 call_rcu(&icq->__rcu_head, icq_free_icq_rcu); in ioc_destroy_icq()
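ioc_destroy_icq() unlinks the icq from every index that can reach it, clears the lookup hint, runs the exit hook, then defers the actual free to RCU. A sketch built around the matched lines; the lockdep assertions are assumptions that document the locking the rest of the file implies:

static void ioc_destroy_icq(struct io_cq *icq)
{
	struct io_context *ioc = icq->ioc;
	struct request_queue *q = icq->q;
	struct elevator_type *et = q->elevator->type;

	lockdep_assert_held(&ioc->lock);	/* assumed locking contract */
	lockdep_assert_held(q->queue_lock);

	/* unlink from the per-ioc radix tree/hlist and the per-queue list */
	radix_tree_delete(&ioc->icq_tree, icq->q->id);
	hlist_del_init(&icq->ioc_node);
	list_del_init(&icq->q_node);

	/* drop the lookup hint if it points at this icq; hint updates are
	 * allowed to race, clearing is done under the queue lock */
	if (rcu_access_pointer(ioc->icq_hint) == icq)
		rcu_assign_pointer(ioc->icq_hint, NULL);

	ioc_exit_icq(icq);

	/* the queue may disappear before the RCU callback runs, so record
	 * which slab cache to free the icq from */
	icq->__rcu_icq_cache = et->icq_cache;
	call_rcu(&icq->__rcu_head, icq_free_icq_rcu);
}
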
103 struct io_cq *icq = hlist_entry(ioc->icq_list.first, in ioc_release_fn() local
105 struct request_queue *q = icq->q; in ioc_release_fn()
108 ioc_destroy_icq(icq); in ioc_release_fn()
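ioc_release_fn() is the deferred release work item that drains ioc->icq_list, destroying one icq per iteration. Only three of its lines match refs:icq, so most of the sketch below is reconstruction: the work_struct embedding, the trylock/back-off dance against the lock order (queue lock before ioc lock), and the final kmem_cache_free() of the ioc are assumptions:

static void ioc_release_fn(struct work_struct *work)
{
	struct io_context *ioc = container_of(work, struct io_context,
					      release_work);
	unsigned long flags;

	spin_lock_irqsave(&ioc->lock, flags);

	while (!hlist_empty(&ioc->icq_list)) {
		struct io_cq *icq = hlist_entry(ioc->icq_list.first,
						struct io_cq, ioc_node);
		struct request_queue *q = icq->q;

		if (spin_trylock(q->queue_lock)) {
			ioc_destroy_icq(icq);
			spin_unlock(q->queue_lock);
		} else {
			/* correct order is queue lock first; back off,
			 * give the holder a chance, and retry */
			spin_unlock_irqrestore(&ioc->lock, flags);
			cpu_relax();
			spin_lock_irqsave(&ioc->lock, flags);
		}
	}

	spin_unlock_irqrestore(&ioc->lock, flags);

	kmem_cache_free(iocontext_cachep, ioc);
}
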
168 struct io_cq *icq; in put_io_context_active() local
182 hlist_for_each_entry(icq, &ioc->icq_list, ioc_node) { in put_io_context_active()
183 if (icq->flags & ICQ_EXITED) in put_io_context_active()
185 if (spin_trylock(icq->q->queue_lock)) { in put_io_context_active()
186 ioc_exit_icq(icq); in put_io_context_active()
187 spin_unlock(icq->q->queue_lock); in put_io_context_active()
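put_io_context_active() only exits icqs (it does not destroy them), skipping any already marked ICQ_EXITED. The loop body is in the matches; the reference-count fast path and the retry when the queue lock cannot be taken in reverse order are reconstructed and should be treated as assumptions:

void put_io_context_active(struct io_context *ioc)
{
	unsigned long flags;
	struct io_cq *icq;

	if (!atomic_dec_and_test(&ioc->active_ref)) {
		put_io_context(ioc);
		return;
	}

retry:
	spin_lock_irqsave(&ioc->lock, flags);
	hlist_for_each_entry(icq, &ioc->icq_list, ioc_node) {
		if (icq->flags & ICQ_EXITED)
			continue;
		if (spin_trylock(icq->q->queue_lock)) {
			ioc_exit_icq(icq);
			spin_unlock(icq->q->queue_lock);
		} else {
			/* reverse lock order failed; drop and restart */
			spin_unlock_irqrestore(&ioc->lock, flags);
			cpu_relax();
			goto retry;
		}
	}
	spin_unlock_irqrestore(&ioc->lock, flags);

	put_io_context(ioc);
}
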
224 struct io_cq *icq = list_entry(q->icq_list.next, in ioc_clear_queue() local
226 struct io_context *ioc = icq->ioc; in ioc_clear_queue()
229 ioc_destroy_icq(icq); in ioc_clear_queue()
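ioc_clear_queue() is the queue-side teardown: it drains q->icq_list, destroying each icq under its owning ioc's lock. A short sketch around the matched lines, assuming the caller already holds q->queue_lock (which ioc_destroy_icq() needs):

void ioc_clear_queue(struct request_queue *q)
{
	lockdep_assert_held(q->queue_lock);	/* assumed caller contract */

	while (!list_empty(&q->icq_list)) {
		struct io_cq *icq = list_entry(q->icq_list.next,
					       struct io_cq, q_node);
		struct io_context *ioc = icq->ioc;

		spin_lock(&ioc->lock);
		ioc_destroy_icq(icq);
		spin_unlock(&ioc->lock);
	}
}
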
319 struct io_cq *icq; in ioc_lookup_icq() local
330 icq = rcu_dereference(ioc->icq_hint); in ioc_lookup_icq()
331 if (icq && icq->q == q) in ioc_lookup_icq()
334 icq = radix_tree_lookup(&ioc->icq_tree, q->id); in ioc_lookup_icq()
335 if (icq && icq->q == q) in ioc_lookup_icq()
336 rcu_assign_pointer(ioc->icq_hint, icq); /* allowed to race */ in ioc_lookup_icq()
338 icq = NULL; in ioc_lookup_icq()
341 return icq; in ioc_lookup_icq()
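Lookup tries the RCU-protected icq_hint first and falls back to the radix tree keyed by q->id, refreshing the hint on a hit; a hint update is explicitly allowed to race. Sketch with the rcu_read_lock() bracket and the out label reconstructed around the matched lines:

/* Find the io_cq for the (ioc, q) pair; q->queue_lock assumed held. */
struct io_cq *ioc_lookup_icq(struct io_context *ioc, struct request_queue *q)
{
	struct io_cq *icq;

	rcu_read_lock();

	/* fast path: the last icq this ioc used */
	icq = rcu_dereference(ioc->icq_hint);
	if (icq && icq->q == q)
		goto out;

	/* slow path: radix tree indexed by queue id */
	icq = radix_tree_lookup(&ioc->icq_tree, q->id);
	if (icq && icq->q == q)
		rcu_assign_pointer(ioc->icq_hint, icq);	/* allowed to race */
	else
		icq = NULL;
out:
	rcu_read_unlock();
	return icq;
}
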
361 struct io_cq *icq; in ioc_create_icq() local
364 icq = kmem_cache_alloc_node(et->icq_cache, gfp_mask | __GFP_ZERO, in ioc_create_icq()
366 if (!icq) in ioc_create_icq()
370 kmem_cache_free(et->icq_cache, icq); in ioc_create_icq()
374 icq->ioc = ioc; in ioc_create_icq()
375 icq->q = q; in ioc_create_icq()
376 INIT_LIST_HEAD(&icq->q_node); in ioc_create_icq()
377 INIT_HLIST_NODE(&icq->ioc_node); in ioc_create_icq()
383 if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq))) { in ioc_create_icq()
384 hlist_add_head(&icq->ioc_node, &ioc->icq_list); in ioc_create_icq()
385 list_add(&icq->q_node, &q->icq_list); in ioc_create_icq()
387 et->ops.elevator_init_icq_fn(icq); in ioc_create_icq()
389 kmem_cache_free(et->icq_cache, icq); in ioc_create_icq()
390 icq = ioc_lookup_icq(ioc, q); in ioc_create_icq()
391 if (!icq) in ioc_create_icq()
398 return icq; in ioc_create_icq()
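Creation allocates the icq from the elevator's icq_cache on the queue's NUMA node, then links it into the per-ioc radix tree/hlist and the per-queue list under both locks; if the radix-tree insert loses a race to another task, the fresh allocation is freed and the winner is looked up instead. In the sketch below the locking, the radix-tree preload, and the error message are reconstructions, not lines from the matches:

struct io_cq *ioc_create_icq(struct io_context *ioc, struct request_queue *q,
			     gfp_t gfp_mask)
{
	struct elevator_type *et = q->elevator->type;
	struct io_cq *icq;

	/* zeroed allocation on the queue's NUMA node */
	icq = kmem_cache_alloc_node(et->icq_cache, gfp_mask | __GFP_ZERO,
				    q->node);
	if (!icq)
		return NULL;

	if (radix_tree_maybe_preload(gfp_mask) < 0) {
		kmem_cache_free(et->icq_cache, icq);
		return NULL;
	}

	icq->ioc = ioc;
	icq->q = q;
	INIT_LIST_HEAD(&icq->q_node);
	INIT_HLIST_NODE(&icq->ioc_node);

	/* take both locks and try to link the new icq */
	spin_lock_irq(q->queue_lock);
	spin_lock(&ioc->lock);

	if (likely(!radix_tree_insert(&ioc->icq_tree, q->id, icq))) {
		hlist_add_head(&icq->ioc_node, &ioc->icq_list);
		list_add(&icq->q_node, &q->icq_list);
		if (et->ops.elevator_init_icq_fn)
			et->ops.elevator_init_icq_fn(icq);
	} else {
		/* lost the race: free ours and use the one already linked */
		kmem_cache_free(et->icq_cache, icq);
		icq = ioc_lookup_icq(ioc, q);
		if (!icq)
			printk(KERN_ERR "icq link failed\n");	/* illustrative message */
	}

	spin_unlock(&ioc->lock);
	spin_unlock_irq(q->queue_lock);
	radix_tree_preload_end();
	return icq;
}
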