Lines matching refs: chan (DMA engine core, drivers/dma/dmaengine.c)

84 	return chan_dev->chan;  in dev_to_dma_chan()
90 struct dma_chan *chan; in memcpy_count_show() local
96 chan = dev_to_dma_chan(dev); in memcpy_count_show()
97 if (chan) { in memcpy_count_show()
99 count += per_cpu_ptr(chan->local, i)->memcpy_count; in memcpy_count_show()
112 struct dma_chan *chan; in bytes_transferred_show() local
118 chan = dev_to_dma_chan(dev); in bytes_transferred_show()
119 if (chan) { in bytes_transferred_show()
121 count += per_cpu_ptr(chan->local, i)->bytes_transferred; in bytes_transferred_show()
134 struct dma_chan *chan; in in_use_show() local
138 chan = dev_to_dma_chan(dev); in in_use_show()
139 if (chan) in in_use_show()
140 err = sprintf(buf, "%d\n", chan->client_count); in in_use_show()
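
The three show() callbacks above back the per-channel sysfs attributes (memcpy_count, bytes_transferred, in_use). A minimal userspace sketch that reads them, assuming a channel registered as dma0chan0 per the "dma%dchan%d" naming used at registration further down:

#include <stdio.h>

int main(void)
{
	/* attribute names match the show() callbacks above;
	 * the dma0chan0 path is an assumption for illustration */
	static const char *attrs[] = {
		"memcpy_count", "bytes_transferred", "in_use"
	};
	char path[128], buf[64];

	for (int i = 0; i < 3; i++) {
		FILE *f;

		snprintf(path, sizeof(path),
			 "/sys/class/dma/dma0chan0/%s", attrs[i]);
		f = fopen(path, "r");
		if (f && fgets(buf, sizeof(buf), f))
			printf("%s: %s", attrs[i], buf);
		if (f)
			fclose(f);
	}
	return 0;
}
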
192 static struct module *dma_chan_to_owner(struct dma_chan *chan) in dma_chan_to_owner() argument
194 return chan->device->dev->driver->owner; in dma_chan_to_owner()
203 static void balance_ref_count(struct dma_chan *chan) in balance_ref_count() argument
205 struct module *owner = dma_chan_to_owner(chan); in balance_ref_count()
207 while (chan->client_count < dmaengine_ref_count) { in balance_ref_count()
209 chan->client_count++; in balance_ref_count()
219 static int dma_chan_get(struct dma_chan *chan) in dma_chan_get() argument
221 struct module *owner = dma_chan_to_owner(chan); in dma_chan_get()
225 if (chan->client_count) { in dma_chan_get()
234 if (chan->device->device_alloc_chan_resources) { in dma_chan_get()
235 ret = chan->device->device_alloc_chan_resources(chan); in dma_chan_get()
240 if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask)) in dma_chan_get()
241 balance_ref_count(chan); in dma_chan_get()
244 chan->client_count++; in dma_chan_get()
258 static void dma_chan_put(struct dma_chan *chan) in dma_chan_put() argument
261 if (!chan->client_count) in dma_chan_put()
264 chan->client_count--; in dma_chan_put()
265 module_put(dma_chan_to_owner(chan)); in dma_chan_put()
268 if (!chan->client_count && chan->device->device_free_chan_resources) in dma_chan_put()
269 chan->device->device_free_chan_resources(chan); in dma_chan_put()
272 if (chan->router && chan->router->route_free) { in dma_chan_put()
273 chan->router->route_free(chan->router->dev, chan->route_data); in dma_chan_put()
274 chan->router = NULL; in dma_chan_put()
275 chan->route_data = NULL; in dma_chan_put()
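
dma_chan_get() and dma_chan_put() are the static refcount helpers behind everything below: a successful get pins the provider module and allocates channel resources on the first reference (then balance_ref_count() catches a newly arrived public channel up to the global dmaengine_ref_count); the last put frees the resources and releases any DMA router along the way. Conceptually, every consumer path pairs them like this (sketch only; both helpers are private to dmaengine.c and called under dma_list_mutex):

	int err = dma_chan_get(chan);	/* try_module_get + alloc resources */

	if (!err) {
		/* ... chan->client_count >= 1, channel usable ... */
		dma_chan_put(chan);	/* module_put; frees at count 0 */
	}
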
279 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie) in dma_sync_wait() argument
284 dma_async_issue_pending(chan); in dma_sync_wait()
286 status = dma_async_is_tx_complete(chan, cookie, NULL, NULL); in dma_sync_wait()
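
dma_sync_wait() is the simple polling completion helper: it issues pending work on the channel and spins on dma_async_is_tx_complete() until the cookie resolves or a timeout hits. A client-side sketch, assuming a descriptor already prepared by one of the driver's device_prep_* hooks:

#include <linux/dmaengine.h>

/* hypothetical helper: submit a prepared descriptor, then busy-wait */
static int submit_and_wait(struct dma_chan *chan,
			   struct dma_async_tx_descriptor *tx)
{
	dma_cookie_t cookie = dmaengine_submit(tx);

	if (dma_submit_error(cookie))
		return -EINVAL;
	return dma_sync_wait(chan, cookie) == DMA_COMPLETE ? 0 : -EIO;
}
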
310 struct dma_chan *chan; member
357 return this_cpu_read(channel_table[tx_type]->chan); in dma_find_channel()
367 struct dma_chan *chan; in dma_issue_pending_all() local
373 list_for_each_entry(chan, &device->channels, device_node) in dma_issue_pending_all()
374 if (chan->client_count) in dma_issue_pending_all()
375 device->device_issue_pending(chan); in dma_issue_pending_all()
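
dma_find_channel() reads the per-CPU channel cached for a transaction type, and dma_issue_pending_all() kicks device_issue_pending() on every channel that has clients. A sketch of the fast-path pairing, assuming the caller holds a dmaengine reference (see dmaengine_get() below) so the channel table stays populated:

#include <linux/dmaengine.h>

static void kick_memcpy_offload(void)
{
	struct dma_chan *chan = dma_find_channel(DMA_MEMCPY);

	if (chan) {
		/* ... prep and dmaengine_submit() descriptors on chan ... */
		dma_issue_pending_all();	/* flush every busy channel */
	}
}
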
384 static bool dma_chan_is_local(struct dma_chan *chan, int cpu) in dma_chan_is_local() argument
386 int node = dev_to_node(chan->device->dev); in dma_chan_is_local()
403 struct dma_chan *chan; in min_chan() local
411 list_for_each_entry(chan, &device->channels, device_node) { in min_chan()
412 if (!chan->client_count) in min_chan()
414 if (!min || chan->table_count < min->table_count) in min_chan()
415 min = chan; in min_chan()
417 if (dma_chan_is_local(chan, cpu)) in min_chan()
419 chan->table_count < localmin->table_count) in min_chan()
420 localmin = chan; in min_chan()
424 chan = localmin ? localmin : min; in min_chan()
426 if (chan) in min_chan()
427 chan->table_count++; in min_chan()
429 return chan; in min_chan()
442 struct dma_chan *chan; in dma_channel_rebalance() local
450 per_cpu_ptr(channel_table[cap], cpu)->chan = NULL; in dma_channel_rebalance()
455 list_for_each_entry(chan, &device->channels, device_node) in dma_channel_rebalance()
456 chan->table_count = 0; in dma_channel_rebalance()
466 chan = min_chan(cap, cpu); in dma_channel_rebalance()
467 per_cpu_ptr(channel_table[cap], cpu)->chan = chan; in dma_channel_rebalance()
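
min_chan() and dma_channel_rebalance() maintain that per-CPU cache: the rebalance clears the table and every channel's table_count, then for each (capability, CPU) pair lets min_chan() pick the least-shared channel, preferring one whose device sits on the CPU's NUMA node. The table entry itself is just the one-pointer struct whose member line appears above, so the dma_find_channel() lookup is a single this_cpu_read():

/* shape of the per-CPU lookup (matches the 'struct dma_chan *chan;'
 * member entry above); channel_table[tx_type] is a percpu pointer to
 * one of these per CPU */
struct dma_chan_tbl_ent {
	struct dma_chan *chan;
};
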
471 int dma_get_slave_caps(struct dma_chan *chan, struct dma_slave_caps *caps) in dma_get_slave_caps() argument
475 if (!chan || !caps) in dma_get_slave_caps()
478 device = chan->device; in dma_get_slave_caps()
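
dma_get_slave_caps() fills a dma_slave_caps from the device-wide capability fields. A consumer sketch that checks a destination bus width before configuring a slave transfer:

#include <linux/dmaengine.h>

static bool chan_supports_4byte_dst(struct dma_chan *chan)
{
	struct dma_slave_caps caps;

	if (dma_get_slave_caps(chan, &caps))
		return false;
	return caps.dst_addr_widths & BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
}
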
512 struct dma_chan *chan; in private_candidate() local
522 list_for_each_entry(chan, &dev->channels, device_node) { in private_candidate()
524 if (chan->client_count) in private_candidate()
528 list_for_each_entry(chan, &dev->channels, device_node) { in private_candidate()
529 if (chan->client_count) { in private_candidate()
531 __func__, dma_chan_name(chan)); in private_candidate()
534 if (fn && !fn(chan, fn_param)) { in private_candidate()
536 __func__, dma_chan_name(chan)); in private_candidate()
539 return chan; in private_candidate()
549 struct dma_chan *dma_get_slave_channel(struct dma_chan *chan) in dma_get_slave_channel() argument
556 if (chan->client_count == 0) { in dma_get_slave_channel()
557 struct dma_device *device = chan->device; in dma_get_slave_channel()
561 err = dma_chan_get(chan); in dma_get_slave_channel()
564 __func__, dma_chan_name(chan), err); in dma_get_slave_channel()
565 chan = NULL; in dma_get_slave_channel()
570 chan = NULL; in dma_get_slave_channel()
575 return chan; in dma_get_slave_channel()
582 struct dma_chan *chan; in dma_get_any_slave_channel() local
591 chan = private_candidate(&mask, device, NULL, NULL); in dma_get_any_slave_channel()
592 if (chan) { in dma_get_any_slave_channel()
595 err = dma_chan_get(chan); in dma_get_any_slave_channel()
598 __func__, dma_chan_name(chan), err); in dma_get_any_slave_channel()
599 chan = NULL; in dma_get_any_slave_channel()
607 return chan; in dma_get_any_slave_channel()
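
private_candidate() is the shared scan both entry points funnel through: dma_get_slave_channel() takes a reference on one specific, known channel, while dma_get_any_slave_channel() lets private_candidate() pick any free one on the device. The specific variant is what firmware-lookup providers typically call from their translate hooks; a sketch with hypothetical foo_* names:

#include <linux/dmaengine.h>
#include <linux/of_dma.h>

/* hypothetical provider state */
struct foo_chan { struct dma_chan common; };
struct foo_dmadev { unsigned int nr_channels; struct foo_chan *chans; };

static struct dma_chan *foo_of_xlate(struct of_phandle_args *spec,
				     struct of_dma *ofdma)
{
	struct foo_dmadev *fd = ofdma->of_dma_data;
	unsigned int id = spec->args[0];

	if (id >= fd->nr_channels)
		return NULL;
	return dma_get_slave_channel(&fd->chans[id].common);
}
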
623 struct dma_chan *chan = NULL; in __dma_request_channel() local
629 chan = private_candidate(mask, device, fn, fn_param); in __dma_request_channel()
630 if (chan) { in __dma_request_channel()
638 err = dma_chan_get(chan); in __dma_request_channel()
642 __func__, dma_chan_name(chan)); in __dma_request_channel()
646 __func__, dma_chan_name(chan), err); in __dma_request_channel()
651 chan = NULL; in __dma_request_channel()
658 chan ? "success" : "fail", in __dma_request_channel()
659 chan ? dma_chan_name(chan) : NULL); in __dma_request_channel()
661 return chan; in __dma_request_channel()
708 void dma_release_channel(struct dma_chan *chan) in dma_release_channel() argument
711 WARN_ONCE(chan->client_count != 1, in dma_release_channel()
712 "chan reference count %d != 1\n", chan->client_count); in dma_release_channel()
713 dma_chan_put(chan); in dma_release_channel()
715 if (--chan->device->privatecnt == 0) in dma_release_channel()
716 dma_cap_clear(DMA_PRIVATE, chan->device->cap_mask); in dma_release_channel()
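
__dma_request_channel() (usually reached through the dma_request_channel() macro) walks every registered device, asks private_candidate() for a free channel that passes the caller's filter, marks the device DMA_PRIVATE, and grabs a reference; dma_release_channel() is its required counterpart and WARNs if the reference count is not exactly 1. The classic consumer lifecycle, with a hypothetical filter:

#include <linux/dmaengine.h>

/* hypothetical filter: accept only channels of one known parent device */
static bool my_filter(struct dma_chan *chan, void *param)
{
	return chan->device->dev == param;
}

static void request_use_release(struct device *dev)
{
	dma_cap_mask_t mask;
	struct dma_chan *chan;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	chan = dma_request_channel(mask, my_filter, dev);
	if (!chan)
		return;
	/* ... prep, dmaengine_submit(), dma_async_issue_pending() ... */
	dma_release_channel(chan);
}
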
727 struct dma_chan *chan; in dmaengine_get() local
737 list_for_each_entry(chan, &device->channels, device_node) { in dmaengine_get()
738 err = dma_chan_get(chan); in dmaengine_get()
745 __func__, dma_chan_name(chan), err); in dmaengine_get()
765 struct dma_chan *chan; in dmaengine_put() local
774 list_for_each_entry(chan, &device->channels, device_node) in dmaengine_put()
775 dma_chan_put(chan); in dmaengine_put()
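
dmaengine_get() and dmaengine_put() are the async_tx-style global references: get walks every public channel and dma_chan_get()s it (tolerating -ENODEV for a device that is mid-unregister), which is what keeps the dma_find_channel() table usable; put reverses it. Sketch:

#include <linux/dmaengine.h>

static void memcpy_offload_window(void)
{
	struct dma_chan *chan;

	dmaengine_get();	/* pin all public channels */
	chan = dma_find_channel(DMA_MEMCPY);
	if (chan) {
		/* ... offload copies via chan ... */
	}
	dmaengine_put();	/* drop the references again */
}
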
841 	struct dma_chan *chan; in dma_async_device_register() local
891 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
893 chan->local = alloc_percpu(typeof(*chan->local)); in dma_async_device_register()
894 if (chan->local == NULL) in dma_async_device_register()
896 chan->dev = kzalloc(sizeof(*chan->dev), GFP_KERNEL); in dma_async_device_register()
897 if (chan->dev == NULL) { in dma_async_device_register()
898 free_percpu(chan->local); in dma_async_device_register()
899 chan->local = NULL; in dma_async_device_register()
903 chan->chan_id = chancnt++; in dma_async_device_register()
904 chan->dev->device.class = &dma_devclass; in dma_async_device_register()
905 chan->dev->device.parent = device->dev; in dma_async_device_register()
906 chan->dev->chan = chan; in dma_async_device_register()
907 chan->dev->idr_ref = idr_ref; in dma_async_device_register()
908 chan->dev->dev_id = device->dev_id; in dma_async_device_register()
910 dev_set_name(&chan->dev->device, "dma%dchan%d", in dma_async_device_register()
911 device->dev_id, chan->chan_id); in dma_async_device_register()
913 rc = device_register(&chan->dev->device); in dma_async_device_register()
915 free_percpu(chan->local); in dma_async_device_register()
916 chan->local = NULL; in dma_async_device_register()
917 kfree(chan->dev); in dma_async_device_register()
921 chan->client_count = 0; in dma_async_device_register()
928 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
932 if (dma_chan_get(chan) == -ENODEV) { in dma_async_device_register()
960 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
961 if (chan->local == NULL) in dma_async_device_register()
964 chan->dev->chan = NULL; in dma_async_device_register()
966 device_unregister(&chan->dev->device); in dma_async_device_register()
967 free_percpu(chan->local); in dma_async_device_register()
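
The registration loop above gives every channel its per-CPU stats area (chan->local), a class device named dma%dchan%d, and a chan_id assigned in list order; the tail section is the unwind path when device_register() fails partway. The provider-side setup it expects, sketched with the hypothetical foo_* types from earlier:

#include <linux/dmaengine.h>

static int foo_register(struct foo_dmadev *fd, struct dma_device *dd)
{
	unsigned int i;

	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	INIT_LIST_HEAD(&dd->channels);
	for (i = 0; i < fd->nr_channels; i++)
		list_add_tail(&fd->chans[i].common.device_node,
			      &dd->channels);
	/* dd->dev and the device_* callbacks must already be set */
	return dma_async_device_register(dd);
}
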
982 struct dma_chan *chan; in dma_async_device_unregister() local
989 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_unregister()
990 WARN_ONCE(chan->client_count, in dma_async_device_unregister()
992 __func__, chan->client_count); in dma_async_device_unregister()
994 chan->dev->chan = NULL; in dma_async_device_unregister()
996 device_unregister(&chan->dev->device); in dma_async_device_unregister()
997 free_percpu(chan->local); in dma_async_device_unregister()
1130 struct dma_chan *chan) in dma_async_tx_descriptor_init() argument
1132 tx->chan = chan; in dma_async_tx_descriptor_init()
1158 return dma_sync_wait(tx->chan, tx->cookie); in dma_wait_for_async_tx()
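
dma_async_tx_descriptor_init() seeds tx->chan (plus the descriptor's internal state), which is exactly what dma_wait_for_async_tx() reads back when it falls through to dma_sync_wait(tx->chan, tx->cookie). A driver-side sketch with hypothetical names:

#include <linux/dmaengine.h>

struct foo_desc { struct dma_async_tx_descriptor txd; };

static dma_cookie_t foo_tx_submit(struct dma_async_tx_descriptor *txd);

static void foo_init_desc(struct foo_desc *d, struct dma_chan *chan)
{
	dma_async_tx_descriptor_init(&d->txd, chan);
	d->txd.tx_submit = foo_tx_submit;	/* hypothetical submit hook */
}
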
1170 struct dma_chan *chan; in dma_run_dependencies() local
1177 chan = dep->chan; in dma_run_dependencies()
1187 if (dep_next && dep_next->chan == chan) in dma_run_dependencies()
1196 chan->device->device_issue_pending(chan); in dma_run_dependencies()
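
dma_run_dependencies() serves drivers that support chained (dependent) descriptors: on completion it walks the dependency list, submits each dependent descriptor, and defers device_issue_pending() while consecutive dependents sit on the same channel, so that channel is kicked once. A completion-path sketch reusing the hypothetical foo_desc above (real drivers also complete cookies and unmap buffers here):

static void foo_desc_complete(struct foo_desc *d)
{
	struct dma_async_tx_descriptor *txd = &d->txd;

	if (txd->callback)
		txd->callback(txd->callback_param);
	dma_run_dependencies(txd);
}
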