Lines matching refs: device (cross-reference hits for 'device' in the DMA engine core, drivers/dma/dmaengine.c)

79 static struct dma_chan *dev_to_dma_chan(struct device *dev)  in dev_to_dma_chan()
83 chan_dev = container_of(dev, typeof(*chan_dev), device); in dev_to_dma_chan()
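
The hits at source lines 79-83 show the container_of() idiom used to map the
embedded struct device back to its wrapping struct dma_chan_dev. A minimal
sketch of the same pattern (example_dev_to_chan is an invented name; the
kernel's own helper is the dev_to_dma_chan() listed above):

	#include <linux/device.h>
	#include <linux/dmaengine.h>

	/* Recover the channel wrapper from the struct device embedded in it;
	 * the embedded member is named "device", as in the hit at line 83. */
	static struct dma_chan *example_dev_to_chan(struct device *dev)
	{
		struct dma_chan_dev *chan_dev;

		chan_dev = container_of(dev, struct dma_chan_dev, device);
		return chan_dev->chan;
	}

The sysfs show callbacks listed next (memcpy_count_show and friends) use the
same helper to find their channel before reporting its statistics.
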
87 static ssize_t memcpy_count_show(struct device *dev, in memcpy_count_show()
109 static ssize_t bytes_transferred_show(struct device *dev, in bytes_transferred_show()
131 static ssize_t in_use_show(struct device *dev, struct device_attribute *attr, in in_use_show()
157 static void chan_dev_release(struct device *dev) in chan_dev_release()
161 chan_dev = container_of(dev, typeof(*chan_dev), device); in chan_dev_release()
179 #define dma_device_satisfies_mask(device, mask) \ argument
180 __dma_device_satisfies_mask((device), &(mask))
182 __dma_device_satisfies_mask(struct dma_device *device, in __dma_device_satisfies_mask() argument
187 bitmap_and(has.bits, want->bits, device->cap_mask.bits, in __dma_device_satisfies_mask()
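
Source lines 179-187 decide whether a dma_device can satisfy a client's
capability request by AND-ing the requested mask against device->cap_mask. The
comparison tail is not part of the hits above, so the following is a hedged
sketch of the usual "is want a subset of cap_mask" test, not a verbatim copy
(example_satisfies_mask is an invented name):

	#include <linux/bitmap.h>
	#include <linux/dmaengine.h>

	/* The device satisfies the request only if (want & cap_mask) == want,
	 * i.e. every capability bit the client asked for is offered. */
	static bool example_satisfies_mask(struct dma_device *device,
					   const dma_cap_mask_t *want)
	{
		dma_cap_mask_t has;

		bitmap_and(has.bits, want->bits, device->cap_mask.bits,
			   DMA_TX_TYPE_END);
		return bitmap_equal(want->bits, has.bits, DMA_TX_TYPE_END);
	}

Clients normally build such a mask with dma_cap_zero() and dma_cap_set()
before passing it to dma_request_channel(); see the sketch further down.
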
194 return chan->device->dev->driver->owner; in dma_chan_to_owner()
234 if (chan->device->device_alloc_chan_resources) { in dma_chan_get()
235 ret = chan->device->device_alloc_chan_resources(chan); in dma_chan_get()
240 if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask)) in dma_chan_get()
268 if (!chan->client_count && chan->device->device_free_chan_resources) in dma_chan_put()
269 chan->device->device_free_chan_resources(chan); in dma_chan_put()
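
Source lines 234-269 show the client reference counting: the first
dma_chan_get() on a channel invokes the driver's device_alloc_chan_resources()
hook, and the final dma_chan_put() invokes device_free_chan_resources(). A
hedged sketch of the driver side (the foo_* names are hypothetical and the
bodies only hint at what real drivers do there):

	#include <linux/dmaengine.h>

	/* Called on the first dma_chan_get(); drivers usually allocate their
	 * descriptor pool here.  A non-negative return (commonly the number
	 * of descriptors) means success, a negative errno aborts the get. */
	static int foo_alloc_chan_resources(struct dma_chan *chan)
	{
		return 1;
	}

	/* Called on the last dma_chan_put(); undo the allocation above. */
	static void foo_free_chan_resources(struct dma_chan *chan)
	{
	}

	/* wired up at probe time:
	 *	dma_dev->device_alloc_chan_resources = foo_alloc_chan_resources;
	 *	dma_dev->device_free_chan_resources  = foo_free_chan_resources;
	 */
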
359 struct dma_device *device; in dma_issue_pending_all() local
363 list_for_each_entry_rcu(device, &dma_device_list, global_node) { in dma_issue_pending_all()
364 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_issue_pending_all()
366 list_for_each_entry(chan, &device->channels, device_node) in dma_issue_pending_all()
368 device->device_issue_pending(chan); in dma_issue_pending_all()
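
Source lines 359-368 kick device_issue_pending() on every channel of every
public (non-DMA_PRIVATE) device. The per-channel client equivalent is
dma_async_issue_pending(), called after submitting a descriptor; a hedged
sketch of that submit-then-issue sequence (example_submit is an invented
helper, and obtaining the descriptor via a prep call is not shown):

	#include <linux/dmaengine.h>

	/* Submit a prepared descriptor, then ask the driver to start working
	 * on its pending queue; this lands in the same device_issue_pending()
	 * hook that dma_issue_pending_all() calls at line 368. */
	static int example_submit(struct dma_chan *chan,
				  struct dma_async_tx_descriptor *tx)
	{
		dma_cookie_t cookie;

		cookie = dmaengine_submit(tx);
		if (dma_submit_error(cookie))
			return -EIO;

		dma_async_issue_pending(chan);
		return 0;
	}
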
379 int node = dev_to_node(chan->device->dev); in dma_chan_is_local()
395 struct dma_device *device; in min_chan() local
400 list_for_each_entry(device, &dma_device_list, global_node) { in min_chan()
401 if (!dma_has_cap(cap, device->cap_mask) || in min_chan()
402 dma_has_cap(DMA_PRIVATE, device->cap_mask)) in min_chan()
404 list_for_each_entry(chan, &device->channels, device_node) { in min_chan()
436 struct dma_device *device; in dma_channel_rebalance() local
445 list_for_each_entry(device, &dma_device_list, global_node) { in dma_channel_rebalance()
446 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_channel_rebalance()
448 list_for_each_entry(chan, &device->channels, device_node) in dma_channel_rebalance()
466 struct dma_device *device; in dma_get_slave_caps() local
471 device = chan->device; in dma_get_slave_caps()
474 if (!test_bit(DMA_SLAVE, device->cap_mask.bits)) in dma_get_slave_caps()
482 if (!device->directions) in dma_get_slave_caps()
485 caps->src_addr_widths = device->src_addr_widths; in dma_get_slave_caps()
486 caps->dst_addr_widths = device->dst_addr_widths; in dma_get_slave_caps()
487 caps->directions = device->directions; in dma_get_slave_caps()
488 caps->residue_granularity = device->residue_granularity; in dma_get_slave_caps()
494 caps->cmd_pause = !!(device->device_pause && device->device_resume); in dma_get_slave_caps()
495 caps->cmd_terminate = !!device->device_terminate_all; in dma_get_slave_caps()
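
Source lines 466-495 populate a struct dma_slave_caps from the device-wide
fields: address widths, supported directions, residue granularity, and whether
pause/resume and terminate are implemented. A hedged sketch of a slave client
checking those caps before configuring a channel (example_check_caps and the
particular width/direction policy are only illustrative):

	#include <linux/dmaengine.h>

	static int example_check_caps(struct dma_chan *chan)
	{
		struct dma_slave_caps caps;
		int ret;

		/* copies the fields filled in at lines 485-495 */
		ret = dma_get_slave_caps(chan, &caps);
		if (ret)
			return ret;

		if (!(caps.directions & BIT(DMA_DEV_TO_MEM)))
			return -EINVAL;		/* illustrative policy */

		if (!(caps.src_addr_widths & BIT(DMA_SLAVE_BUSWIDTH_4_BYTES)))
			return -EINVAL;

		return 0;
	}
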
564 struct dma_chan *dma_get_any_slave_channel(struct dma_device *device) in dma_get_any_slave_channel() argument
576 chan = private_candidate(&mask, device, NULL, NULL); in dma_get_any_slave_channel()
578 dma_cap_set(DMA_PRIVATE, device->cap_mask); in dma_get_any_slave_channel()
579 device->privatecnt++; in dma_get_any_slave_channel()
585 if (--device->privatecnt == 0) in dma_get_any_slave_channel()
586 dma_cap_clear(DMA_PRIVATE, device->cap_mask); in dma_get_any_slave_channel()
607 struct dma_device *device, *_d; in __dma_request_channel() local
613 list_for_each_entry_safe(device, _d, &dma_device_list, global_node) { in __dma_request_channel()
614 chan = private_candidate(mask, device, fn, fn_param); in __dma_request_channel()
621 dma_cap_set(DMA_PRIVATE, device->cap_mask); in __dma_request_channel()
622 device->privatecnt++; in __dma_request_channel()
628 list_del_rcu(&device->global_node); in __dma_request_channel()
634 if (--device->privatecnt == 0) in __dma_request_channel()
635 dma_cap_clear(DMA_PRIVATE, device->cap_mask); in __dma_request_channel()
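
Source lines 607-635 are the allocator behind the dma_request_channel() macro:
it walks dma_device_list, asks private_candidate() for a free channel that
satisfies the mask, and on success marks the device DMA_PRIVATE and bumps
privatecnt. A hedged caller-side sketch (the filter function and the
DMA_MEMCPY capability are only examples; real filters usually match a specific
controller or request line):

	#include <linux/dmaengine.h>

	/* Hypothetical filter: accept whatever channel the core offers. */
	static bool any_chan_filter(struct dma_chan *chan, void *param)
	{
		return true;
	}

	static struct dma_chan *example_grab_memcpy_chan(void)
	{
		dma_cap_mask_t mask;

		dma_cap_zero(mask);
		dma_cap_set(DMA_MEMCPY, mask);

		/* expands to __dma_request_channel(&mask, ...); returns NULL
		 * if no free channel satisfies the mask */
		return dma_request_channel(mask, any_chan_filter, NULL);
	}
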
657 struct dma_chan *dma_request_slave_channel_reason(struct device *dev, in dma_request_slave_channel_reason()
679 struct dma_chan *dma_request_slave_channel(struct device *dev, in dma_request_slave_channel()
696 if (--chan->device->privatecnt == 0) in dma_release_channel()
697 dma_cap_clear(DMA_PRIVATE, chan->device->cap_mask); in dma_release_channel()
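
Source lines 657-697 cover the named slave-channel request paths and
dma_release_channel(), which drops the channel and clears DMA_PRIVATE once the
device's privatecnt falls to zero. A hedged sketch of the usual
acquire/use/release pairing in a slave driver (the "rx" name is only an
example and must match the DT/ACPI binding of the consumer device):

	#include <linux/dmaengine.h>
	#include <linux/err.h>

	static int example_acquire_release(struct device *dev)
	{
		struct dma_chan *chan;

		/* the _reason variant returns ERR_PTR(), so -EPROBE_DEFER can
		 * propagate; dma_request_slave_channel() returns NULL instead */
		chan = dma_request_slave_channel_reason(dev, "rx");
		if (IS_ERR(chan))
			return PTR_ERR(chan);

		/* ... configure and use the channel ... */

		dma_release_channel(chan);	/* see lines 696-697 */
		return 0;
	}
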
707 struct dma_device *device, *_d; in dmaengine_get() local
715 list_for_each_entry_safe(device, _d, &dma_device_list, global_node) { in dmaengine_get()
716 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dmaengine_get()
718 list_for_each_entry(chan, &device->channels, device_node) { in dmaengine_get()
722 list_del_rcu(&device->global_node); in dmaengine_get()
745 struct dma_device *device; in dmaengine_put() local
752 list_for_each_entry(device, &dma_device_list, global_node) { in dmaengine_put()
753 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dmaengine_put()
755 list_for_each_entry(chan, &device->channels, device_node) in dmaengine_put()
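
Source lines 707-755 are the legacy opportunistic-client interface:
dmaengine_get() takes a reference on every channel of every public device and
dmaengine_put() releases them again. A minimal hedged sketch of how an
async_tx-style user pairs these with dma_find_channel() (example_async_client
is an invented name; the actual transfer setup is omitted):

	#include <linux/dmaengine.h>

	static void example_async_client(void)
	{
		struct dma_chan *chan;

		dmaengine_get();			/* pin public channels */

		chan = dma_find_channel(DMA_MEMCPY);	/* may be NULL */
		if (chan) {
			/* prep, submit and issue offloaded copies here */
		}

		dmaengine_put();			/* drop the references */
	}
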
762 static bool device_has_all_tx_types(struct dma_device *device) in device_has_all_tx_types() argument
769 if (!dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in device_has_all_tx_types()
774 if (!dma_has_cap(DMA_MEMCPY, device->cap_mask)) in device_has_all_tx_types()
779 if (!dma_has_cap(DMA_XOR, device->cap_mask)) in device_has_all_tx_types()
783 if (!dma_has_cap(DMA_XOR_VAL, device->cap_mask)) in device_has_all_tx_types()
789 if (!dma_has_cap(DMA_PQ, device->cap_mask)) in device_has_all_tx_types()
793 if (!dma_has_cap(DMA_PQ_VAL, device->cap_mask)) in device_has_all_tx_types()
801 static int get_dma_id(struct dma_device *device) in get_dma_id() argument
809 device->dev_id = rc; in get_dma_id()
819 int dma_async_device_register(struct dma_device *device) in dma_async_device_register() argument
825 if (!device) in dma_async_device_register()
829 BUG_ON(dma_has_cap(DMA_MEMCPY, device->cap_mask) && in dma_async_device_register()
830 !device->device_prep_dma_memcpy); in dma_async_device_register()
831 BUG_ON(dma_has_cap(DMA_XOR, device->cap_mask) && in dma_async_device_register()
832 !device->device_prep_dma_xor); in dma_async_device_register()
833 BUG_ON(dma_has_cap(DMA_XOR_VAL, device->cap_mask) && in dma_async_device_register()
834 !device->device_prep_dma_xor_val); in dma_async_device_register()
835 BUG_ON(dma_has_cap(DMA_PQ, device->cap_mask) && in dma_async_device_register()
836 !device->device_prep_dma_pq); in dma_async_device_register()
837 BUG_ON(dma_has_cap(DMA_PQ_VAL, device->cap_mask) && in dma_async_device_register()
838 !device->device_prep_dma_pq_val); in dma_async_device_register()
839 BUG_ON(dma_has_cap(DMA_INTERRUPT, device->cap_mask) && in dma_async_device_register()
840 !device->device_prep_dma_interrupt); in dma_async_device_register()
841 BUG_ON(dma_has_cap(DMA_SG, device->cap_mask) && in dma_async_device_register()
842 !device->device_prep_dma_sg); in dma_async_device_register()
843 BUG_ON(dma_has_cap(DMA_CYCLIC, device->cap_mask) && in dma_async_device_register()
844 !device->device_prep_dma_cyclic); in dma_async_device_register()
845 BUG_ON(dma_has_cap(DMA_INTERLEAVE, device->cap_mask) && in dma_async_device_register()
846 !device->device_prep_interleaved_dma); in dma_async_device_register()
848 BUG_ON(!device->device_tx_status); in dma_async_device_register()
849 BUG_ON(!device->device_issue_pending); in dma_async_device_register()
850 BUG_ON(!device->dev); in dma_async_device_register()
855 if (device_has_all_tx_types(device)) in dma_async_device_register()
856 dma_cap_set(DMA_ASYNC_TX, device->cap_mask); in dma_async_device_register()
861 rc = get_dma_id(device); in dma_async_device_register()
870 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
883 chan->dev->device.class = &dma_devclass; in dma_async_device_register()
884 chan->dev->device.parent = device->dev; in dma_async_device_register()
887 chan->dev->dev_id = device->dev_id; in dma_async_device_register()
889 dev_set_name(&chan->dev->device, "dma%dchan%d", in dma_async_device_register()
890 device->dev_id, chan->chan_id); in dma_async_device_register()
892 rc = device_register(&chan->dev->device); in dma_async_device_register()
902 device->chancnt = chancnt; in dma_async_device_register()
906 if (dmaengine_ref_count && !dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_async_device_register()
907 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
921 list_add_tail_rcu(&device->global_node, &dma_device_list); in dma_async_device_register()
922 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_async_device_register()
923 device->privatecnt++; /* Always private */ in dma_async_device_register()
933 idr_remove(&dma_idr, device->dev_id); in dma_async_device_register()
939 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
945 device_unregister(&chan->dev->device); in dma_async_device_register()
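
Source lines 819-945 are the registration path: it BUG()s when a capability
bit is set without its matching prep callback, insists on device_tx_status,
device_issue_pending and a backing struct device, allocates a dev_id, creates
a "dma%dchan%d" class device per channel, and finally adds the dma_device to
dma_device_list. A heavily abridged, hedged provider-side sketch (every foo_*
name, the single embedded channel and the MEMCPY-only capability set are
invented for illustration; the hook bodies are assumed to live elsewhere in
the driver):

	#include <linux/dmaengine.h>

	/* hypothetical driver hooks, defined elsewhere (not shown) */
	static struct dma_async_tx_descriptor *foo_prep_memcpy(struct dma_chan *chan,
			dma_addr_t dst, dma_addr_t src, size_t len, unsigned long flags);
	static int foo_alloc_chan_resources(struct dma_chan *chan);
	static void foo_free_chan_resources(struct dma_chan *chan);
	static enum dma_status foo_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
			struct dma_tx_state *state);
	static void foo_issue_pending(struct dma_chan *chan);

	struct foo_dma {			/* hypothetical controller */
		struct dma_device dma;
		struct dma_chan chan;
	};

	static int foo_register(struct device *dev, struct foo_dma *fd)
	{
		struct dma_device *dd = &fd->dma;

		dma_cap_set(DMA_MEMCPY, dd->cap_mask);	/* must match the prep hook */

		dd->dev = dev;					/* checked at line 850 */
		dd->device_prep_dma_memcpy = foo_prep_memcpy;	/* required by lines 829-830 */
		dd->device_alloc_chan_resources = foo_alloc_chan_resources;
		dd->device_free_chan_resources = foo_free_chan_resources;
		dd->device_tx_status = foo_tx_status;		/* required by line 848 */
		dd->device_issue_pending = foo_issue_pending;	/* required by line 849 */

		INIT_LIST_HEAD(&dd->channels);
		fd->chan.device = dd;
		list_add_tail(&fd->chan.device_node, &dd->channels);

		return dma_async_device_register(dd);
	}

The matching teardown is dma_async_device_unregister() (lines 959-975 below),
which removes the device from the global list and unregisters each channel's
class device.
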
959 void dma_async_device_unregister(struct dma_device *device) in dma_async_device_unregister() argument
964 list_del_rcu(&device->global_node); in dma_async_device_unregister()
968 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_unregister()
975 device_unregister(&chan->dev->device); in dma_async_device_unregister()
1020 struct device *dev = unmap->dev; in dmaengine_unmap()
1093 dmaengine_get_unmap_data(struct device *dev, int nr, gfp_t flags) in dmaengine_get_unmap_data()
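
Source lines 1020 and 1093 belong to the unmap-data pool:
dmaengine_get_unmap_data() hands out a refcounted struct dmaengine_unmap_data,
and when the last reference is dropped dmaengine_unmap() undoes every recorded
mapping against unmap->dev. A hedged sketch of the common pattern for a single
copy (example_map_for_copy is an invented name; dma_mapping_error() checks and
the actual descriptor prep are trimmed for brevity):

	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>

	static int example_map_for_copy(struct dma_device *dd,
					struct page *src, struct page *dst,
					size_t len)
	{
		struct dmaengine_unmap_data *unmap;

		unmap = dmaengine_get_unmap_data(dd->dev, 2, GFP_NOWAIT);
		if (!unmap)
			return -ENOMEM;

		unmap->len = len;
		unmap->addr[0] = dma_map_page(dd->dev, src, 0, len, DMA_TO_DEVICE);
		unmap->to_cnt = 1;
		unmap->addr[1] = dma_map_page(dd->dev, dst, 0, len, DMA_FROM_DEVICE);
		unmap->from_cnt = 1;

		/* ... attach to a descriptor with dma_set_unmap(tx, unmap) ... */

		dmaengine_unmap_put(unmap);	/* drop our reference */
		return 0;
	}
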
1177 chan->device->device_issue_pending(chan); in dma_run_dependencies()