Lines matching refs: device (cross-reference hits in drivers/dma/dmaengine.c)

79 static struct dma_chan *dev_to_dma_chan(struct device *dev)  in dev_to_dma_chan()
83 chan_dev = container_of(dev, typeof(*chan_dev), device); in dev_to_dma_chan()
87 static ssize_t memcpy_count_show(struct device *dev, in memcpy_count_show()
109 static ssize_t bytes_transferred_show(struct device *dev, in bytes_transferred_show()
131 static ssize_t in_use_show(struct device *dev, struct device_attribute *attr, in in_use_show()
157 static void chan_dev_release(struct device *dev) in chan_dev_release()
161 chan_dev = container_of(dev, typeof(*chan_dev), device); in chan_dev_release()
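
The hits at 79-161 all recover the channel's class-device wrapper from a bare struct device via container_of(). A minimal sketch of that pattern, assuming the struct dma_chan_dev layout from <linux/dmaengine.h> (embedded struct device member named device, back-pointer chan):

#include <linux/device.h>
#include <linux/dmaengine.h>

static struct dma_chan *chan_from_dev(struct device *dev)
{
	struct dma_chan_dev *chan_dev;

	/* dev is the embedded member, so container_of() walks back to
	 * the wrapper; chan_dev->chan can go NULL once the channel is
	 * torn down, so real callers re-check it under dma_list_mutex. */
	chan_dev = container_of(dev, struct dma_chan_dev, device);
	return chan_dev->chan;
}

The sysfs show callbacks (87-131) and chan_dev_release (157-161) are all thin consumers of this same recovery step.
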
179 #define dma_device_satisfies_mask(device, mask) \ argument
180 __dma_device_satisfies_mask((device), &(mask))
182 __dma_device_satisfies_mask(struct dma_device *device, in __dma_device_satisfies_mask() argument
187 bitmap_and(has.bits, want->bits, device->cap_mask.bits, in __dma_device_satisfies_mask()
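
The mask test at 179-187 reduces to a bitmap intersection. A sketch of the check, assuming dma_cap_mask_t is the DECLARE_BITMAP wrapper from <linux/dmaengine.h>: a device satisfies a request when ANDing the wanted bits with its cap_mask loses nothing.

#include <linux/bitmap.h>
#include <linux/dmaengine.h>

static bool satisfies_mask(struct dma_device *device,
			   const dma_cap_mask_t *want)
{
	dma_cap_mask_t has;

	bitmap_and(has.bits, want->bits, device->cap_mask.bits,
		   DMA_TX_TYPE_END);
	/* every wanted capability must survive the intersection */
	return bitmap_equal(want->bits, has.bits, DMA_TX_TYPE_END);
}
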
194 return chan->device->dev->driver->owner; in dma_chan_to_owner()
234 if (chan->device->device_alloc_chan_resources) { in dma_chan_get()
235 ret = chan->device->device_alloc_chan_resources(chan); in dma_chan_get()
240 if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask)) in dma_chan_get()
268 if (!chan->client_count && chan->device->device_free_chan_resources) in dma_chan_put()
269 chan->device->device_free_chan_resources(chan); in dma_chan_put()
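
Lines 234-269 show the channel reference-count protocol: the provider's resources are allocated for the first client and freed with the last one. A condensed sketch of that first-get/last-put shape; the real path also pins the provider module via dma_chan_to_owner() (194) and rebalances non-private channels (240), both trimmed here:

static int chan_get(struct dma_chan *chan)
{
	int ret;

	/* allocate descriptors only when the first client arrives */
	if (chan->client_count == 0 &&
	    chan->device->device_alloc_chan_resources) {
		ret = chan->device->device_alloc_chan_resources(chan);
		if (ret < 0)
			return ret;
	}
	chan->client_count++;
	return 0;
}

static void chan_put(struct dma_chan *chan)
{
	/* free descriptors once the last client is gone */
	if (--chan->client_count == 0 &&
	    chan->device->device_free_chan_resources)
		chan->device->device_free_chan_resources(chan);
}
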
366 struct dma_device *device; in dma_issue_pending_all() local
370 list_for_each_entry_rcu(device, &dma_device_list, global_node) { in dma_issue_pending_all()
371 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_issue_pending_all()
373 list_for_each_entry(chan, &device->channels, device_node) in dma_issue_pending_all()
375 device->device_issue_pending(chan); in dma_issue_pending_all()
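
dma_issue_pending_all() (366-375) walks the global device list under RCU, skipping privately-held devices. A sketch of that walk as it appears inside dmaengine.c (dma_device_list is file-static there, so this only makes sense in that file):

#include <linux/rcupdate.h>
#include <linux/dmaengine.h>

static void issue_pending_all(void)
{
	struct dma_device *device;
	struct dma_chan *chan;

	rcu_read_lock();
	list_for_each_entry_rcu(device, &dma_device_list, global_node) {
		/* private channels are flushed by their owner, not here */
		if (dma_has_cap(DMA_PRIVATE, device->cap_mask))
			continue;
		list_for_each_entry(chan, &device->channels, device_node)
			if (chan->client_count)
				device->device_issue_pending(chan);
	}
	rcu_read_unlock();
}
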
386 int node = dev_to_node(chan->device->dev); in dma_chan_is_local()
402 struct dma_device *device; in min_chan() local
407 list_for_each_entry(device, &dma_device_list, global_node) { in min_chan()
408 if (!dma_has_cap(cap, device->cap_mask) || in min_chan()
409 dma_has_cap(DMA_PRIVATE, device->cap_mask)) in min_chan()
411 list_for_each_entry(chan, &device->channels, device_node) { in min_chan()
443 struct dma_device *device; in dma_channel_rebalance() local
452 list_for_each_entry(device, &dma_device_list, global_node) { in dma_channel_rebalance()
453 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_channel_rebalance()
455 list_for_each_entry(chan, &device->channels, device_node) in dma_channel_rebalance()
473 struct dma_device *device; in dma_get_slave_caps() local
478 device = chan->device; in dma_get_slave_caps()
481 if (!test_bit(DMA_SLAVE, device->cap_mask.bits)) in dma_get_slave_caps()
489 if (!device->directions) in dma_get_slave_caps()
492 caps->src_addr_widths = device->src_addr_widths; in dma_get_slave_caps()
493 caps->dst_addr_widths = device->dst_addr_widths; in dma_get_slave_caps()
494 caps->directions = device->directions; in dma_get_slave_caps()
495 caps->residue_granularity = device->residue_granularity; in dma_get_slave_caps()
501 caps->cmd_pause = !!(device->device_pause && device->device_resume); in dma_get_slave_caps()
502 caps->cmd_terminate = !!device->device_terminate_all; in dma_get_slave_caps()
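
dma_get_slave_caps() (473-502) copies the provider's limits into the caller's struct dma_slave_caps. A hypothetical client-side check built on it:

#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/dmaengine.h>

static int check_mem_to_dev(struct dma_chan *chan)
{
	struct dma_slave_caps caps;
	int ret;

	ret = dma_get_slave_caps(chan, &caps);
	if (ret)
		return ret;

	/* directions is a bitmask over enum dma_transfer_direction */
	if (!(caps.directions & BIT(DMA_MEM_TO_DEV)))
		return -EINVAL;
	return 0;
}

Note from line 501 that cmd_pause is reported only when the provider implements both device_pause and device_resume; a pause-only provider advertises no pause capability at all.
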
557 struct dma_device *device = chan->device; in dma_get_slave_channel() local
559 dma_cap_set(DMA_PRIVATE, device->cap_mask); in dma_get_slave_channel()
560 device->privatecnt++; in dma_get_slave_channel()
566 if (--device->privatecnt == 0) in dma_get_slave_channel()
567 dma_cap_clear(DMA_PRIVATE, device->cap_mask); in dma_get_slave_channel()
579 struct dma_chan *dma_get_any_slave_channel(struct dma_device *device) in dma_get_any_slave_channel() argument
591 chan = private_candidate(&mask, device, NULL, NULL); in dma_get_any_slave_channel()
593 dma_cap_set(DMA_PRIVATE, device->cap_mask); in dma_get_any_slave_channel()
594 device->privatecnt++; in dma_get_any_slave_channel()
600 if (--device->privatecnt == 0) in dma_get_any_slave_channel()
601 dma_cap_clear(DMA_PRIVATE, device->cap_mask); in dma_get_any_slave_channel()
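
The grab paths at 557-601 (and the release path at 715-716 below) keep DMA_PRIVATE and privatecnt in lockstep: the first private allocation hides the device from the public channel pool, the last release re-exposes it. The balanced pair, as a sketch:

static void mark_private(struct dma_device *device)
{
	dma_cap_set(DMA_PRIVATE, device->cap_mask);
	device->privatecnt++;
}

static void unmark_private(struct dma_device *device)
{
	if (--device->privatecnt == 0)
		dma_cap_clear(DMA_PRIVATE, device->cap_mask);
}
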
622 struct dma_device *device, *_d; in __dma_request_channel() local
628 list_for_each_entry_safe(device, _d, &dma_device_list, global_node) { in __dma_request_channel()
629 chan = private_candidate(mask, device, fn, fn_param); in __dma_request_channel()
636 dma_cap_set(DMA_PRIVATE, device->cap_mask); in __dma_request_channel()
637 device->privatecnt++; in __dma_request_channel()
643 list_del_rcu(&device->global_node); in __dma_request_channel()
649 if (--device->privatecnt == 0) in __dma_request_channel()
650 dma_cap_clear(DMA_PRIVATE, device->cap_mask); in __dma_request_channel()
672 struct dma_chan *dma_request_slave_channel_reason(struct device *dev, in dma_request_slave_channel_reason()
694 struct dma_chan *dma_request_slave_channel(struct device *dev, in dma_request_slave_channel()
701 dma_cap_set(DMA_PRIVATE, ch->device->cap_mask); in dma_request_slave_channel()
702 ch->device->privatecnt++; in dma_request_slave_channel()
715 if (--chan->device->privatecnt == 0) in dma_release_channel()
716 dma_cap_clear(DMA_PRIVATE, chan->device->cap_mask); in dma_release_channel()
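
__dma_request_channel() (622-650) is what the public dma_request_channel() macro expands to; a matched device is marked DMA_PRIVATE and even dropped from the global list (643) so other clients stop seeing it. A hypothetical consumer, filter function and all (the chan->private cookie is an assumption about how the provider tags its channels):

#include <linux/dmaengine.h>

/* hypothetical filter: the core calls this for every candidate channel */
static bool my_filter(struct dma_chan *chan, void *param)
{
	return chan->private == param;
}

static struct dma_chan *grab_memcpy_chan(void *match)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);
	return dma_request_channel(mask, my_filter, match);
}

Every successful grab must be paired with dma_release_channel(), which drops privatecnt as seen at 715-716.
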
726 struct dma_device *device, *_d; in dmaengine_get() local
734 list_for_each_entry_safe(device, _d, &dma_device_list, global_node) { in dmaengine_get()
735 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dmaengine_get()
737 list_for_each_entry(chan, &device->channels, device_node) { in dmaengine_get()
741 list_del_rcu(&device->global_node); in dmaengine_get()
764 struct dma_device *device; in dmaengine_put() local
771 list_for_each_entry(device, &dma_device_list, global_node) { in dmaengine_put()
772 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dmaengine_put()
774 list_for_each_entry(chan, &device->channels, device_node) in dmaengine_put()
781 static bool device_has_all_tx_types(struct dma_device *device) in device_has_all_tx_types() argument
788 if (!dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in device_has_all_tx_types()
793 if (!dma_has_cap(DMA_MEMCPY, device->cap_mask)) in device_has_all_tx_types()
798 if (!dma_has_cap(DMA_XOR, device->cap_mask)) in device_has_all_tx_types()
802 if (!dma_has_cap(DMA_XOR_VAL, device->cap_mask)) in device_has_all_tx_types()
808 if (!dma_has_cap(DMA_PQ, device->cap_mask)) in device_has_all_tx_types()
812 if (!dma_has_cap(DMA_PQ_VAL, device->cap_mask)) in device_has_all_tx_types()
820 static int get_dma_id(struct dma_device *device) in get_dma_id() argument
828 device->dev_id = rc; in get_dma_id()
838 int dma_async_device_register(struct dma_device *device) in dma_async_device_register() argument
844 if (!device) in dma_async_device_register()
848 BUG_ON(dma_has_cap(DMA_MEMCPY, device->cap_mask) && in dma_async_device_register()
849 !device->device_prep_dma_memcpy); in dma_async_device_register()
850 BUG_ON(dma_has_cap(DMA_XOR, device->cap_mask) && in dma_async_device_register()
851 !device->device_prep_dma_xor); in dma_async_device_register()
852 BUG_ON(dma_has_cap(DMA_XOR_VAL, device->cap_mask) && in dma_async_device_register()
853 !device->device_prep_dma_xor_val); in dma_async_device_register()
854 BUG_ON(dma_has_cap(DMA_PQ, device->cap_mask) && in dma_async_device_register()
855 !device->device_prep_dma_pq); in dma_async_device_register()
856 BUG_ON(dma_has_cap(DMA_PQ_VAL, device->cap_mask) && in dma_async_device_register()
857 !device->device_prep_dma_pq_val); in dma_async_device_register()
858 BUG_ON(dma_has_cap(DMA_MEMSET, device->cap_mask) && in dma_async_device_register()
859 !device->device_prep_dma_memset); in dma_async_device_register()
860 BUG_ON(dma_has_cap(DMA_INTERRUPT, device->cap_mask) && in dma_async_device_register()
861 !device->device_prep_dma_interrupt); in dma_async_device_register()
862 BUG_ON(dma_has_cap(DMA_SG, device->cap_mask) && in dma_async_device_register()
863 !device->device_prep_dma_sg); in dma_async_device_register()
864 BUG_ON(dma_has_cap(DMA_CYCLIC, device->cap_mask) && in dma_async_device_register()
865 !device->device_prep_dma_cyclic); in dma_async_device_register()
866 BUG_ON(dma_has_cap(DMA_INTERLEAVE, device->cap_mask) && in dma_async_device_register()
867 !device->device_prep_interleaved_dma); in dma_async_device_register()
869 BUG_ON(!device->device_tx_status); in dma_async_device_register()
870 BUG_ON(!device->device_issue_pending); in dma_async_device_register()
871 BUG_ON(!device->dev); in dma_async_device_register()
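
The BUG_ON() run at 848-871 is the registration contract: every advertised capability needs its prep hook, and device_tx_status, device_issue_pending and a parent struct device are unconditional. A minimal sketch of a provider that satisfies it for DMA_MEMCPY (names prefixed my_ are hypothetical):

#include <linux/dmaengine.h>

static struct dma_async_tx_descriptor *my_prep_memcpy(struct dma_chan *chan,
		dma_addr_t dst, dma_addr_t src, size_t len,
		unsigned long flags);
static enum dma_status my_tx_status(struct dma_chan *chan,
		dma_cookie_t cookie, struct dma_tx_state *txstate);
static void my_issue_pending(struct dma_chan *chan);

static int my_register(struct dma_device *dd, struct dma_chan *chan,
		       struct device *parent)
{
	dma_cap_zero(dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);

	/* each BUG_ON() above checks one of these assignments */
	dd->device_prep_dma_memcpy = my_prep_memcpy;
	dd->device_tx_status = my_tx_status;
	dd->device_issue_pending = my_issue_pending;
	dd->dev = parent;

	INIT_LIST_HEAD(&dd->channels);
	chan->device = dd;
	list_add_tail(&chan->device_node, &dd->channels);

	return dma_async_device_register(dd);
}
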
876 if (device_has_all_tx_types(device)) in dma_async_device_register()
877 dma_cap_set(DMA_ASYNC_TX, device->cap_mask); in dma_async_device_register()
882 rc = get_dma_id(device); in dma_async_device_register()
891 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
904 chan->dev->device.class = &dma_devclass; in dma_async_device_register()
905 chan->dev->device.parent = device->dev; in dma_async_device_register()
908 chan->dev->dev_id = device->dev_id; in dma_async_device_register()
910 dev_set_name(&chan->dev->device, "dma%dchan%d", in dma_async_device_register()
911 device->dev_id, chan->chan_id); in dma_async_device_register()
913 rc = device_register(&chan->dev->device); in dma_async_device_register()
923 device->chancnt = chancnt; in dma_async_device_register()
927 if (dmaengine_ref_count && !dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_async_device_register()
928 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
942 list_add_tail_rcu(&device->global_node, &dma_device_list); in dma_async_device_register()
943 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_async_device_register()
944 device->privatecnt++; /* Always private */ in dma_async_device_register()
954 idr_remove(&dma_idr, device->dev_id); in dma_async_device_register()
960 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_register()
966 device_unregister(&chan->dev->device); in dma_async_device_register()
980 void dma_async_device_unregister(struct dma_device *device) in dma_async_device_unregister() argument
985 list_del_rcu(&device->global_node); in dma_async_device_unregister()
989 list_for_each_entry(chan, &device->channels, device_node) { in dma_async_device_unregister()
996 device_unregister(&chan->dev->device); in dma_async_device_unregister()
1041 struct device *dev = unmap->dev; in dmaengine_unmap()
1112 dmaengine_get_unmap_data(struct device *dev, int nr, gfp_t flags) in dmaengine_get_unmap_data()
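
dmaengine_get_unmap_data() (1112) hands out a refcounted struct dmaengine_unmap_data whose release path (dmaengine_unmap, 1041) undoes the mappings against unmap->dev. A sketch of the usual client sequence, assuming the two-entry memcpy layout used by async_tx (addr[0] = source, addr[1] = destination); dma_mapping_error() checks trimmed:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>

static struct dma_async_tx_descriptor *
prep_copy(struct dma_chan *chan, struct page *dst, struct page *src,
	  size_t len)
{
	struct device *dev = chan->device->dev;
	struct dmaengine_unmap_data *unmap;
	struct dma_async_tx_descriptor *tx;

	unmap = dmaengine_get_unmap_data(dev, 2, GFP_NOWAIT);
	if (!unmap)
		return NULL;

	unmap->to_cnt = 1;	/* addr[0] is mapped DMA_TO_DEVICE */
	unmap->addr[0] = dma_map_page(dev, src, 0, len, DMA_TO_DEVICE);
	unmap->from_cnt = 1;	/* addr[1] is mapped DMA_FROM_DEVICE */
	unmap->addr[1] = dma_map_page(dev, dst, 0, len, DMA_FROM_DEVICE);
	unmap->len = len;

	tx = chan->device->device_prep_dma_memcpy(chan, unmap->addr[1],
						  unmap->addr[0], len, 0);
	if (tx)
		dma_set_unmap(tx, unmap);	/* tx takes its own reference */
	dmaengine_unmap_put(unmap);		/* drop ours */
	return tx;
}
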
1196 chan->device->device_issue_pending(chan); in dma_run_dependencies()