Searched refs:cpu_addr (Results 1 - 91 of 91) sorted by relevance

/linux-4.4.14/include/asm-generic/
dma-mapping-common.h
186 void *cpu_addr, dma_addr_t dma_addr, size_t size);
195 void dma_common_free_remap(void *cpu_addr, size_t size, unsigned long vm_flags);
201 * @cpu_addr: kernel CPU-view address returned from dma_alloc_attrs
211 dma_mmap_attrs(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_mmap_attrs() argument
217 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); dma_mmap_attrs()
218 return dma_common_mmap(dev, vma, cpu_addr, dma_addr, size); dma_mmap_attrs()
225 void *cpu_addr, dma_addr_t dma_addr, size_t size);
228 dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_get_sgtable_attrs() argument
234 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, dma_get_sgtable_attrs()
236 return dma_common_get_sgtable(dev, sgt, cpu_addr, dma_addr, size); dma_get_sgtable_attrs()
250 void *cpu_addr; dma_alloc_attrs() local
254 if (dma_alloc_from_coherent(dev, size, dma_handle, &cpu_addr)) dma_alloc_attrs()
255 return cpu_addr; dma_alloc_attrs()
262 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); dma_alloc_attrs()
263 debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr); dma_alloc_attrs()
264 return cpu_addr; dma_alloc_attrs()
268 void *cpu_addr, dma_addr_t dma_handle, dma_free_attrs()
276 if (dma_release_from_coherent(dev, get_order(size), cpu_addr)) dma_free_attrs()
282 debug_dma_free_coherent(dev, size, cpu_addr, dma_handle); dma_free_attrs()
283 ops->free(dev, size, cpu_addr, dma_handle, attrs); dma_free_attrs()
293 void *cpu_addr, dma_addr_t dma_handle) dma_free_coherent()
295 return dma_free_attrs(dev, size, cpu_addr, dma_handle, NULL); dma_free_coherent()
308 void *cpu_addr, dma_addr_t dma_handle) dma_free_noncoherent()
313 dma_free_attrs(dev, size, cpu_addr, dma_handle, &attrs); dma_free_noncoherent()
267 dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_handle, struct dma_attrs *attrs) dma_free_attrs() argument
292 dma_free_coherent(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_handle) dma_free_coherent() argument
307 dma_free_noncoherent(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_handle) dma_free_noncoherent() argument
dma-mapping-broken.h
16 dma_free_coherent(struct device *dev, size_t size, void *cpu_addr,
28 void *cpu_addr, dma_addr_t dma_handle, dma_free_attrs()
32 dma_free_coherent(dev, size, cpu_addr, dma_handle); dma_free_attrs()
27 dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_handle, struct dma_attrs *attrs) dma_free_attrs() argument
dma-coherent.h
14 void *cpu_addr, size_t size, int *ret);
/linux-4.4.14/drivers/staging/android/ion/
ion_cma_heap.c
37 void *cpu_addr; member in struct:ion_cma_buffer_info
64 info->cpu_addr = dma_alloc_coherent(dev, len, &(info->handle), ion_cma_allocate()
67 if (!info->cpu_addr) { ion_cma_allocate()
76 if (dma_get_sgtable(dev, info->table, info->cpu_addr, info->handle, ion_cma_allocate()
87 dma_free_coherent(dev, len, info->cpu_addr, info->handle); ion_cma_allocate()
101 dma_free_coherent(dev, buffer->size, info->cpu_addr, info->handle); ion_cma_free()
145 return dma_mmap_coherent(dev, vma, info->cpu_addr, info->handle, ion_cma_mmap()
154 return info->cpu_addr; ion_cma_map_kernel()
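Note: the ion_cma_heap.c hits above show the pattern that most cpu_addr references in these results reduce to: dma_alloc_coherent() hands back a kernel virtual address (the cpu_addr) together with a dma_addr_t handle, and both must be passed to dma_free_coherent(). A minimal sketch of that pairing follows; the names example_alloc/example_free, buf and handle are hypothetical, and a valid struct device *dev is assumed.

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    static void *buf;          /* cpu_addr: kernel virtual address for the CPU */
    static dma_addr_t handle;  /* bus/DMA address handed to the device */

    static int example_alloc(struct device *dev, size_t size)
    {
            /* one call yields both views of the same buffer */
            buf = dma_alloc_coherent(dev, size, &handle, GFP_KERNEL);
            if (!buf)
                    return -ENOMEM;
            return 0;
    }

    static void example_free(struct device *dev, size_t size)
    {
            /* freeing needs both the cpu_addr and the DMA handle */
            dma_free_coherent(dev, size, buf, handle);
    }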
/linux-4.4.14/arch/x86/include/asm/xen/
page-coherent.h
18 void *cpu_addr, dma_addr_t dma_handle, xen_free_coherent_pages()
21 free_pages((unsigned long) cpu_addr, get_order(size)); xen_free_coherent_pages()
17 xen_free_coherent_pages(struct device *hwdev, size_t size, void *cpu_addr, dma_addr_t dma_handle, struct dma_attrs *attrs) xen_free_coherent_pages() argument
/linux-4.4.14/arch/s390/kvm/
trace.h
217 TP_PROTO(VCPU_PROTO_COMMON, __u8 order_code, __u16 cpu_addr, \
219 TP_ARGS(VCPU_ARGS_COMMON, order_code, cpu_addr, parameter),
224 __field(__u16, cpu_addr)
231 __entry->cpu_addr = cpu_addr;
239 __entry->cpu_addr, __entry->parameter)
243 TP_PROTO(VCPU_PROTO_COMMON, __u8 order_code, __u16 cpu_addr),
244 TP_ARGS(VCPU_ARGS_COMMON, order_code, cpu_addr),
249 __field(__u16, cpu_addr)
255 __entry->cpu_addr = cpu_addr;
262 __entry->cpu_addr)
sigp.c
291 u16 cpu_addr, u32 parameter, u64 *status_reg) handle_sigp_dst()
294 struct kvm_vcpu *dst_vcpu = kvm_get_vcpu_by_id(vcpu->kvm, cpu_addr); handle_sigp_dst()
368 u16 cpu_addr) handle_sigp_order_in_user_space()
412 order_code, cpu_addr); handle_sigp_order_in_user_space()
422 u16 cpu_addr = vcpu->run->s.regs.gprs[r3]; kvm_s390_handle_sigp() local
431 if (handle_sigp_order_in_user_space(vcpu, order_code, cpu_addr)) kvm_s390_handle_sigp()
439 trace_kvm_s390_handle_sigp(vcpu, order_code, cpu_addr, parameter); kvm_s390_handle_sigp()
446 rc = handle_sigp_dst(vcpu, order_code, cpu_addr, kvm_s390_handle_sigp()
470 u16 cpu_addr = vcpu->run->s.regs.gprs[r3]; kvm_s390_handle_sigp_pei() local
474 trace_kvm_s390_handle_sigp_pei(vcpu, order_code, cpu_addr); kvm_s390_handle_sigp_pei()
477 dest_vcpu = kvm_get_vcpu_by_id(vcpu->kvm, cpu_addr); kvm_s390_handle_sigp_pei()
290 handle_sigp_dst(struct kvm_vcpu *vcpu, u8 order_code, u16 cpu_addr, u32 parameter, u64 *status_reg) handle_sigp_dst() argument
367 handle_sigp_order_in_user_space(struct kvm_vcpu *vcpu, u8 order_code, u16 cpu_addr) handle_sigp_order_in_user_space() argument
interrupt.c
434 int cpu_addr; __deliver_emergency_signal() local
437 cpu_addr = find_first_bit(li->sigp_emerg_pending, KVM_MAX_VCPUS); __deliver_emergency_signal()
438 clear_bit(cpu_addr, li->sigp_emerg_pending); __deliver_emergency_signal()
446 cpu_addr, 0); __deliver_emergency_signal()
450 rc |= put_guest_lc(vcpu, cpu_addr, (u16 *)__LC_EXT_CPU_ADDR); __deliver_emergency_signal()
/linux-4.4.14/arch/avr32/mm/
dma-coherent.c
111 void *cpu_addr, dma_addr_t handle) dma_free_coherent()
113 void *addr = phys_to_cached(uncached_to_phys(cpu_addr)); dma_free_coherent()
117 cpu_addr, (unsigned long)handle, (unsigned)size); dma_free_coherent()
143 void *cpu_addr, dma_addr_t handle) dma_free_writecombine()
147 iounmap(cpu_addr); dma_free_writecombine()
110 dma_free_coherent(struct device *dev, size_t size, void *cpu_addr, dma_addr_t handle) dma_free_coherent() argument
142 dma_free_writecombine(struct device *dev, size_t size, void *cpu_addr, dma_addr_t handle) dma_free_writecombine() argument
/linux-4.4.14/drivers/base/
dma-mapping.c
229 void *cpu_addr, dma_addr_t handle, size_t size) dma_common_get_sgtable()
231 struct page *page = virt_to_page(cpu_addr); dma_common_get_sgtable()
247 void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_common_mmap()
253 unsigned long pfn = page_to_pfn(virt_to_page(cpu_addr)); dma_common_mmap()
258 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) dma_common_mmap()
329 void dma_common_free_remap(void *cpu_addr, size_t size, unsigned long vm_flags) dma_common_free_remap() argument
331 struct vm_struct *area = find_vm_area(cpu_addr); dma_common_free_remap()
334 WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr); dma_common_free_remap()
338 unmap_kernel_range((unsigned long)cpu_addr, size); dma_common_free_remap()
339 vunmap(cpu_addr); dma_common_free_remap()
228 dma_common_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t handle, size_t size) dma_common_get_sgtable() argument
246 dma_common_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_common_mmap() argument
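Note: dma_common_mmap() above is the generic back-end behind dma_mmap_coherent()/dma_mmap_attrs(); it maps the pages behind cpu_addr into a userspace vma. A minimal caller-side sketch, assuming buf/handle came from dma_alloc_coherent() and using a hypothetical struct example_dev:

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>

    struct example_dev {
            struct device *dev;
            void *buf;          /* cpu_addr from dma_alloc_coherent() */
            dma_addr_t handle;
    };

    static int example_mmap(struct example_dev *drv, struct vm_area_struct *vma)
    {
            /* hand the coherent buffer (cpu_addr + handle) to userspace */
            return dma_mmap_coherent(drv->dev, vma, drv->buf, drv->handle,
                                     vma->vm_end - vma->vm_start);
    }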
/linux-4.4.14/arch/m68k/include/asm/
dma-mapping.h
32 void *cpu_addr, dma_addr_t dma_handle, dma_free_attrs()
36 dma_free_coherent(dev, size, cpu_addr, dma_handle); dma_free_attrs()
115 void *cpu_addr, dma_addr_t dma_addr, size_t size);
117 void *cpu_addr, dma_addr_t dma_addr,
31 dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_handle, struct dma_attrs *attrs) dma_free_attrs() argument
/linux-4.4.14/arch/arc/include/asm/
dma-mapping.h
31 void *cpu_addr, dma_addr_t dma_addr, size_t size);
33 void *cpu_addr, dma_addr_t dma_addr,
76 dma_map_single(struct device *dev, void *cpu_addr, size_t size, dma_map_single() argument
79 _dma_cache_sync((unsigned long)cpu_addr, size, dir); dma_map_single()
80 return (dma_addr_t)cpu_addr; dma_map_single()
/linux-4.4.14/drivers/media/common/saa7146/
saa7146_core.c
427 dev->d_rps0.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM, saa7146_init_one()
429 if (!dev->d_rps0.cpu_addr) saa7146_init_one()
432 dev->d_rps1.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM, saa7146_init_one()
434 if (!dev->d_rps1.cpu_addr) saa7146_init_one()
437 dev->d_i2c.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM, saa7146_init_one()
439 if (!dev->d_i2c.cpu_addr) saa7146_init_one()
486 pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_i2c.cpu_addr, saa7146_init_one()
489 pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_rps1.cpu_addr, saa7146_init_one()
492 pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_rps0.cpu_addr, saa7146_init_one()
515 { dev->d_i2c.cpu_addr, dev->d_i2c.dma_handle }, saa7146_remove_one()
516 { dev->d_rps1.cpu_addr, dev->d_rps1.dma_handle }, saa7146_remove_one()
517 { dev->d_rps0.cpu_addr, dev->d_rps0.dma_handle }, saa7146_remove_one()
saa7146_fops.c
520 vv->d_clipping.cpu_addr = saa7146_vv_init()
523 if( NULL == vv->d_clipping.cpu_addr ) { saa7146_vv_init()
580 pci_free_consistent(dev->pci, SAA7146_CLIPPING_MEM, vv->d_clipping.cpu_addr, vv->d_clipping.dma_handle); saa7146_vv_release()
saa7146_i2c.c
296 __le32 *buffer = dev->d_i2c.cpu_addr; saa7146_i2c_transfer()
saa7146_hlp.c
341 __le32 *clipping = vv->d_clipping.cpu_addr; calculate_clipping_registers_rect()
/linux-4.4.14/arch/c6x/include/asm/
dma-mapping.h
47 extern dma_addr_t dma_map_single(struct device *dev, void *cpu_addr,
102 struct vm_area_struct *vma, void *cpu_addr, dma_mmap_coherent()
109 void *cpu_addr, dma_addr_t dma_addr, dma_get_sgtable()
101 dma_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_coherent() argument
108 dma_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_get_sgtable() argument
/linux-4.4.14/arch/cris/include/asm/
dma-mapping.h
34 dma_free_coherent(struct device *dev, size_t size, void *cpu_addr, dma_free_coherent() argument
163 void *cpu_addr, dma_addr_t dma_addr, size_t size);
165 void *cpu_addr, dma_addr_t dma_addr,
/linux-4.4.14/arch/avr32/include/asm/
dma-mapping.h
61 * @cpu_addr: CPU-view address returned from dma_alloc_coherent
67 * References to memory and mappings associated with cpu_addr/handle
71 void *cpu_addr, dma_addr_t handle);
91 * @cpu_addr: CPU-view address returned from dma_alloc_writecombine
97 * References to memory and mappings associated with cpu_addr/handle
101 void *cpu_addr, dma_addr_t handle);
106 * @cpu_addr: CPU direct mapped address of buffer
117 dma_map_single(struct device *dev, void *cpu_addr, size_t size, dma_map_single() argument
120 dma_cache_sync(dev, cpu_addr, size, direction); dma_map_single()
121 return virt_to_bus(cpu_addr); dma_map_single()
342 void *cpu_addr, dma_addr_t dma_addr, size_t size);
344 void *cpu_addr, dma_addr_t dma_addr,
/linux-4.4.14/drivers/staging/rdma/ipath/
ipath_dma.c
56 void *cpu_addr, size_t size, ipath_dma_map_single()
60 return (u64) cpu_addr; ipath_dma_map_single()
162 void *cpu_addr, u64 dma_handle) ipath_dma_free_coherent()
164 free_pages((unsigned long) cpu_addr, get_order(size)); ipath_dma_free_coherent()
55 ipath_dma_map_single(struct ib_device *dev, void *cpu_addr, size_t size, enum dma_data_direction direction) ipath_dma_map_single() argument
161 ipath_dma_free_coherent(struct ib_device *dev, size_t size, void *cpu_addr, u64 dma_handle) ipath_dma_free_coherent() argument
/linux-4.4.14/drivers/staging/rdma/hfi1/
dma.c
71 static u64 hfi1_dma_map_single(struct ib_device *dev, void *cpu_addr, hfi1_dma_map_single() argument
77 return (u64) cpu_addr; hfi1_dma_map_single()
169 void *cpu_addr, u64 dma_handle) hfi1_dma_free_coherent()
171 free_pages((unsigned long) cpu_addr, get_order(size)); hfi1_dma_free_coherent()
168 hfi1_dma_free_coherent(struct ib_device *dev, size_t size, void *cpu_addr, u64 dma_handle) hfi1_dma_free_coherent() argument
/linux-4.4.14/drivers/infiniband/hw/qib/
qib_dma.c
53 static u64 qib_dma_map_single(struct ib_device *dev, void *cpu_addr, qib_dma_map_single() argument
57 return (u64) cpu_addr; qib_dma_map_single()
152 void *cpu_addr, u64 dma_handle) qib_dma_free_coherent()
154 free_pages((unsigned long) cpu_addr, get_order(size)); qib_dma_free_coherent()
151 qib_dma_free_coherent(struct ib_device *dev, size_t size, void *cpu_addr, u64 dma_handle) qib_dma_free_coherent() argument
/linux-4.4.14/drivers/remoteproc/
wkup_m3_rproc.c
37 * @cpu_addr: MPU virtual address of the memory region
43 void __iomem *cpu_addr; member in struct:wkup_m3_mem
106 va = (__force void *)(wkupm3->mem[i].cpu_addr + offset); wkup_m3_rproc_da_to_va()
176 wkupm3->mem[i].cpu_addr = devm_ioremap_resource(dev, res); wkup_m3_rproc_probe()
177 if (IS_ERR(wkupm3->mem[i].cpu_addr)) { wkup_m3_rproc_probe()
180 ret = PTR_ERR(wkupm3->mem[i].cpu_addr); wkup_m3_rproc_probe()
/linux-4.4.14/include/linux/
dma-mapping.h
265 #define dma_map_single_attrs(dev, cpu_addr, size, dir, attrs) \
266 dma_map_single(dev, cpu_addr, size, dir)
287 void *cpu_addr, dma_addr_t dma_addr) dma_free_writecombine()
291 return dma_free_attrs(dev, size, cpu_addr, dma_addr, &attrs); dma_free_writecombine()
296 void *cpu_addr, dma_addr_t dma_addr, dma_mmap_writecombine()
301 return dma_mmap_attrs(dev, vma, cpu_addr, dma_addr, size, &attrs); dma_mmap_writecombine()
286 dma_free_writecombine(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_addr) dma_free_writecombine() argument
294 dma_mmap_writecombine(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_writecombine() argument
of_address.h
18 u64 cpu_addr; member in struct:of_pci_range
/linux-4.4.14/arch/arm/mm/
dma-mapping.c
150 static void arm_coherent_dma_free(struct device *dev, size_t size, void *cpu_addr,
153 void *cpu_addr, dma_addr_t dma_addr, size_t size,
315 static void __dma_free_remap(void *cpu_addr, size_t size) __dma_free_remap() argument
317 dma_common_free_remap(cpu_addr, size, __dma_free_remap()
566 void *cpu_addr, size_t size, bool want_vaddr) __free_from_contiguous()
570 __dma_free_remap(cpu_addr, size); __free_from_contiguous()
595 #define __free_from_pool(cpu_addr, size) 0
596 #define __free_from_contiguous(dev, page, cpu_addr, size, wv) do { } while (0)
597 #define __dma_free_remap(cpu_addr, size) do { } while (0)
692 void *cpu_addr, dma_addr_t dma_addr, size_t size, __arm_dma_mmap()
702 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) __arm_dma_mmap()
720 void *cpu_addr, dma_addr_t dma_addr, size_t size, arm_coherent_dma_mmap()
723 return __arm_dma_mmap(dev, vma, cpu_addr, dma_addr, size, attrs); arm_coherent_dma_mmap()
727 void *cpu_addr, dma_addr_t dma_addr, size_t size, arm_dma_mmap()
733 return __arm_dma_mmap(dev, vma, cpu_addr, dma_addr, size, attrs); arm_dma_mmap()
739 static void __arm_dma_free(struct device *dev, size_t size, void *cpu_addr, __arm_dma_free() argument
750 } else if (!is_coherent && __free_from_pool(cpu_addr, size)) { __arm_dma_free()
754 __dma_free_remap(cpu_addr, size); __arm_dma_free()
761 __free_from_contiguous(dev, page, cpu_addr, size, want_vaddr); __arm_dma_free()
765 void arm_dma_free(struct device *dev, size_t size, void *cpu_addr, arm_dma_free() argument
768 __arm_dma_free(dev, size, cpu_addr, handle, attrs, false); arm_dma_free()
771 static void arm_coherent_dma_free(struct device *dev, size_t size, void *cpu_addr, arm_coherent_dma_free() argument
774 __arm_dma_free(dev, size, cpu_addr, handle, attrs, true); arm_coherent_dma_free()
778 void *cpu_addr, dma_addr_t handle, size_t size, arm_dma_get_sgtable()
1312 static struct page **__iommu_get_pages(void *cpu_addr, struct dma_attrs *attrs) __iommu_get_pages() argument
1316 if (__in_atomic_pool(cpu_addr, PAGE_SIZE)) __iommu_get_pages()
1317 return __atomic_get_pages(cpu_addr); __iommu_get_pages()
1320 return cpu_addr; __iommu_get_pages()
1322 area = find_vm_area(cpu_addr); __iommu_get_pages()
1349 static void __iommu_free_atomic(struct device *dev, void *cpu_addr, __iommu_free_atomic() argument
1353 __free_from_pool(cpu_addr, size); __iommu_free_atomic()
1404 void *cpu_addr, dma_addr_t dma_addr, size_t size, arm_iommu_mmap_attrs()
1409 struct page **pages = __iommu_get_pages(cpu_addr, attrs); arm_iommu_mmap_attrs()
1440 void arm_iommu_free_attrs(struct device *dev, size_t size, void *cpu_addr, arm_iommu_free_attrs() argument
1446 if (__in_atomic_pool(cpu_addr, size)) { arm_iommu_free_attrs()
1447 __iommu_free_atomic(dev, cpu_addr, handle, size); arm_iommu_free_attrs()
1451 pages = __iommu_get_pages(cpu_addr, attrs); arm_iommu_free_attrs()
1453 WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr); arm_iommu_free_attrs()
1458 dma_common_free_remap(cpu_addr, size, arm_iommu_free_attrs()
1467 void *cpu_addr, dma_addr_t dma_addr, arm_iommu_get_sgtable()
1471 struct page **pages = __iommu_get_pages(cpu_addr, attrs); arm_iommu_get_sgtable()
565 __free_from_contiguous(struct device *dev, struct page *page, void *cpu_addr, size_t size, bool want_vaddr) __free_from_contiguous() argument
691 __arm_dma_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) __arm_dma_mmap() argument
719 arm_coherent_dma_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) arm_coherent_dma_mmap() argument
726 arm_dma_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) arm_dma_mmap() argument
777 arm_dma_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t handle, size_t size, struct dma_attrs *attrs) arm_dma_get_sgtable() argument
1403 arm_iommu_mmap_attrs(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) arm_iommu_mmap_attrs() argument
1466 arm_iommu_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) arm_iommu_get_sgtable() argument
/linux-4.4.14/drivers/pci/host/
pci-xgene.c
264 u64 cpu_addr, u64 pci_addr) xgene_pcie_setup_ob_reg()
286 writel(lower_32_bits(cpu_addr), base); xgene_pcie_setup_ob_reg()
287 writel(upper_32_bits(cpu_addr), base + 0x04); xgene_pcie_setup_ob_reg()
386 u64 cpu_addr = range->cpu_addr; xgene_pcie_setup_ib_reg() local
403 bar_low = pcie_bar_low_val((u32)cpu_addr, flags); xgene_pcie_setup_ib_reg()
409 writel(upper_32_bits(cpu_addr), bar_addr + 0x4); xgene_pcie_setup_ib_reg()
421 writel(upper_32_bits(cpu_addr), bar_addr + 0x4); xgene_pcie_setup_ib_reg()
464 u64 end = range.cpu_addr + range.size - 1; xgene_pcie_parse_map_dma_ranges()
467 range.flags, range.cpu_addr, end, range.pci_addr); xgene_pcie_parse_map_dma_ranges()
262 xgene_pcie_setup_ob_reg(struct xgene_pcie_port *port, struct resource *res, u32 offset, u64 cpu_addr, u64 pci_addr) xgene_pcie_setup_ob_reg() argument
pcie-designware.c
155 int type, u64 cpu_addr, u64 pci_addr, u32 size) dw_pcie_prog_outbound_atu()
159 dw_pcie_writel_rc(pp, lower_32_bits(cpu_addr), PCIE_ATU_LOWER_BASE); dw_pcie_prog_outbound_atu()
160 dw_pcie_writel_rc(pp, upper_32_bits(cpu_addr), PCIE_ATU_UPPER_BASE); dw_pcie_prog_outbound_atu()
161 dw_pcie_writel_rc(pp, lower_32_bits(cpu_addr + size - 1), dw_pcie_prog_outbound_atu()
571 u64 cpu_addr; dw_pcie_rd_other_conf() local
579 cpu_addr = pp->cfg0_base; dw_pcie_rd_other_conf()
584 cpu_addr = pp->cfg1_base; dw_pcie_rd_other_conf()
590 type, cpu_addr, dw_pcie_rd_other_conf()
605 u64 cpu_addr; dw_pcie_wr_other_conf() local
613 cpu_addr = pp->cfg0_base; dw_pcie_wr_other_conf()
618 cpu_addr = pp->cfg1_base; dw_pcie_wr_other_conf()
624 type, cpu_addr, dw_pcie_wr_other_conf()
154 dw_pcie_prog_outbound_atu(struct pcie_port *pp, int index, int type, u64 cpu_addr, u64 pci_addr, u32 size) dw_pcie_prog_outbound_atu() argument
pcie-rcar.c
821 u64 cpu_addr = range->cpu_addr; rcar_pcie_inbound_ranges() local
822 u64 cpu_end = range->cpu_addr + range->size; rcar_pcie_inbound_ranges()
836 if (cpu_addr > 0) { rcar_pcie_inbound_ranges()
837 unsigned long nr_zeros = __ffs64(cpu_addr); rcar_pcie_inbound_ranges()
850 while (cpu_addr < cpu_end) { rcar_pcie_inbound_ranges()
856 rcar_pci_write_reg(pcie, lower_32_bits(cpu_addr), PCIELAR(idx)); rcar_pcie_inbound_ranges()
860 rcar_pci_write_reg(pcie, upper_32_bits(cpu_addr), PCIELAR(idx+1)); rcar_pcie_inbound_ranges()
864 cpu_addr += size; rcar_pcie_inbound_ranges()
908 u64 end = range.cpu_addr + range.size - 1; rcar_pcie_parse_map_dma_ranges()
910 range.flags, range.cpu_addr, end, range.pci_addr); rcar_pcie_parse_map_dma_ranges()
pcie-xilinx.c
702 offset = range.cpu_addr - range.pci_addr; xilinx_pcie_parse_and_add_res()
pci-tegra.c
1615 pcie->io.start = range.cpu_addr; tegra_pcie_parse_dt()
1616 pcie->io.end = range.cpu_addr + range.size - 1; tegra_pcie_parse_dt()
/linux-4.4.14/drivers/net/wireless/ath/wcn36xx/
dxe.c
182 wcn_ch->cpu_addr = dma_alloc_coherent(dev, size, &wcn_ch->dma_addr, wcn36xx_dxe_init_descs()
184 if (!wcn_ch->cpu_addr) wcn36xx_dxe_init_descs()
187 memset(wcn_ch->cpu_addr, 0, size); wcn36xx_dxe_init_descs()
189 cur_dxe = (struct wcn36xx_dxe_desc *)wcn_ch->cpu_addr; wcn36xx_dxe_init_descs()
538 void *cpu_addr; wcn36xx_dxe_allocate_mem_pools() local
547 cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->mgmt_mem_pool.phy_addr, wcn36xx_dxe_allocate_mem_pools()
549 if (!cpu_addr) wcn36xx_dxe_allocate_mem_pools()
552 wcn->mgmt_mem_pool.virt_addr = cpu_addr; wcn36xx_dxe_allocate_mem_pools()
553 memset(cpu_addr, 0, s); wcn36xx_dxe_allocate_mem_pools()
562 cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->data_mem_pool.phy_addr, wcn36xx_dxe_allocate_mem_pools()
564 if (!cpu_addr) wcn36xx_dxe_allocate_mem_pools()
567 wcn->data_mem_pool.virt_addr = cpu_addr; wcn36xx_dxe_allocate_mem_pools()
568 memset(cpu_addr, 0, s); wcn36xx_dxe_allocate_mem_pools()
dxe.h
248 void *cpu_addr; member in struct:wcn36xx_dxe_ch
/linux-4.4.14/arch/mips/pci/
pci.c
143 range.cpu_addr, pci_load_of_ranges()
144 range.cpu_addr + range.size - 1); pci_load_of_ranges()
146 (unsigned long)ioremap(range.cpu_addr, pci_load_of_ranges()
152 range.cpu_addr, pci_load_of_ranges()
153 range.cpu_addr + range.size - 1); pci_load_of_ranges()
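Note: in the PCI host-bridge hits (pci.c above, and the pci-xgene.c / pcie-rcar.c entries earlier), cpu_addr is the struct of_pci_range field holding the CPU-side address of a bridge window parsed from the device-tree "ranges" property. A minimal sketch of that parser loop, assuming a hypothetical device_node *np and the of_address.h API shown in these results:

    #include <linux/kernel.h>
    #include <linux/of_address.h>

    static void example_walk_ranges(struct device_node *np)
    {
            struct of_pci_range_parser parser;
            struct of_pci_range range;

            if (of_pci_range_parser_init(&parser, np))
                    return;

            /* each range carries pci_addr, cpu_addr, size and flags */
            for_each_of_pci_range(&parser, &range)
                    pr_info("PCI %#llx -> CPU %#llx (size %#llx)\n",
                            (unsigned long long)range.pci_addr,
                            (unsigned long long)range.cpu_addr,
                            (unsigned long long)range.size);
    }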
/linux-4.4.14/arch/metag/include/asm/
dma-mapping.h
24 void *cpu_addr, dma_addr_t dma_addr, size_t size);
27 void *cpu_addr, dma_addr_t dma_addr, size_t size);
189 void *cpu_addr, dma_addr_t dma_addr,
/linux-4.4.14/arch/microblaze/kernel/
dma.c
159 void *cpu_addr, dma_addr_t handle, size_t size, dma_direct_mmap_coherent()
173 pfn = consistent_virt_to_pfn(cpu_addr); dma_direct_mmap_coherent()
175 pfn = virt_to_pfn(cpu_addr); dma_direct_mmap_coherent()
158 dma_direct_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t handle, size_t size, struct dma_attrs *attrs) dma_direct_mmap_coherent() argument
/linux-4.4.14/arch/metag/kernel/
dma.c
336 void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap()
346 c = metag_vm_region_find(&consistent_head, (unsigned long)cpu_addr); dma_mmap()
368 void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_coherent()
371 return dma_mmap(dev, vma, cpu_addr, dma_addr, size); dma_mmap_coherent()
376 void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_writecombine()
379 return dma_mmap(dev, vma, cpu_addr, dma_addr, size); dma_mmap_writecombine()
335 dma_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap() argument
367 dma_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_coherent() argument
375 dma_mmap_writecombine(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_writecombine() argument
/linux-4.4.14/arch/arm/include/asm/
dma-mapping.h
192 * @cpu_addr: CPU-view address returned from dma_alloc_coherent
199 * References to memory and mappings associated with cpu_addr/handle
202 extern void arm_dma_free(struct device *dev, size_t size, void *cpu_addr,
209 * @cpu_addr: kernel CPU-view address returned from dma_alloc_coherent
219 void *cpu_addr, dma_addr_t dma_addr, size_t size,
282 void *cpu_addr, dma_addr_t dma_addr, size_t size,
/linux-4.4.14/arch/alpha/kernel/
pci-noop.c
127 void *cpu_addr, dma_addr_t dma_addr, alpha_noop_free_coherent()
130 free_pages((unsigned long)cpu_addr, get_order(size)); alpha_noop_free_coherent()
126 alpha_noop_free_coherent(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_addr, struct dma_attrs *attrs) alpha_noop_free_coherent() argument
pci_iommu.c
252 pci_map_single_1(struct pci_dev *pdev, void *cpu_addr, size_t size, pci_map_single_1()
264 paddr = __pa(cpu_addr); pci_map_single_1()
273 cpu_addr, size, ret, __builtin_return_address(0)); pci_map_single_1()
284 cpu_addr, size, ret, __builtin_return_address(0)); pci_map_single_1()
318 ret += (unsigned long)cpu_addr & ~PAGE_MASK; pci_map_single_1()
321 cpu_addr, size, npages, ret, __builtin_return_address(0)); pci_map_single_1()
440 void *cpu_addr; alpha_pci_alloc_coherent()
446 cpu_addr = (void *)__get_free_pages(gfp, order); alpha_pci_alloc_coherent()
447 if (! cpu_addr) { alpha_pci_alloc_coherent()
455 memset(cpu_addr, 0, size); alpha_pci_alloc_coherent()
457 *dma_addrp = pci_map_single_1(pdev, cpu_addr, size, 0); alpha_pci_alloc_coherent()
459 free_pages((unsigned long)cpu_addr, order); alpha_pci_alloc_coherent()
469 size, cpu_addr, *dma_addrp, __builtin_return_address(0)); alpha_pci_alloc_coherent()
471 return cpu_addr;
481 void *cpu_addr, dma_addr_t dma_addr, alpha_pci_free_coherent()
486 free_pages((unsigned long)cpu_addr, get_order(size)); alpha_pci_free_coherent()
251 pci_map_single_1(struct pci_dev *pdev, void *cpu_addr, size_t size, int dac_allowed) pci_map_single_1() argument
439 void *cpu_addr; alpha_pci_alloc_coherent() local
479 alpha_pci_free_coherent(struct device *dev, size_t size, void *cpu_addr, dma_addr_t dma_addr, struct dma_attrs *attrs) alpha_pci_free_coherent() argument
/linux-4.4.14/arch/frv/include/asm/
dma-mapping.h
137 struct vm_area_struct *vma, void *cpu_addr, dma_mmap_coherent()
144 void *cpu_addr, dma_addr_t dma_addr, dma_get_sgtable()
136 dma_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_coherent() argument
143 dma_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_get_sgtable() argument
/linux-4.4.14/arch/arm/include/asm/xen/
page-coherent.h
28 void *cpu_addr, dma_addr_t dma_handle, xen_free_coherent_pages()
31 __generic_dma_ops(hwdev)->free(hwdev, size, cpu_addr, dma_handle, attrs); xen_free_coherent_pages()
27 xen_free_coherent_pages(struct device *hwdev, size_t size, void *cpu_addr, dma_addr_t dma_handle, struct dma_attrs *attrs) xen_free_coherent_pages() argument
/linux-4.4.14/arch/arm64/mm/
dma-mapping.c
305 void *cpu_addr, dma_addr_t dma_addr, size_t size, __swiotlb_mmap()
318 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) __swiotlb_mmap()
332 void *cpu_addr, dma_addr_t handle, size_t size, __swiotlb_get_sgtable()
441 void *cpu_addr, dma_addr_t dma_addr, size_t size, __dummy_mmap()
602 static void __iommu_free_attrs(struct device *dev, size_t size, void *cpu_addr, __iommu_free_attrs() argument
609 * @cpu_addr will be one of 3 things depending on how it was allocated: __iommu_free_attrs()
618 if (__in_atomic_pool(cpu_addr, size)) { __iommu_free_attrs()
620 __free_from_pool(cpu_addr, size); __iommu_free_attrs()
621 } else if (is_vmalloc_addr(cpu_addr)){ __iommu_free_attrs()
622 struct vm_struct *area = find_vm_area(cpu_addr); __iommu_free_attrs()
627 dma_common_free_remap(cpu_addr, size, VM_USERMAP); __iommu_free_attrs()
630 __free_pages(virt_to_page(cpu_addr), get_order(size)); __iommu_free_attrs()
635 void *cpu_addr, dma_addr_t dma_addr, size_t size, __iommu_mmap_attrs()
644 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) __iommu_mmap_attrs()
647 area = find_vm_area(cpu_addr); __iommu_mmap_attrs()
655 void *cpu_addr, dma_addr_t dma_addr, __iommu_get_sgtable()
659 struct vm_struct *area = find_vm_area(cpu_addr); __iommu_get_sgtable()
303 __swiotlb_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) __swiotlb_mmap() argument
331 __swiotlb_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t handle, size_t size, struct dma_attrs *attrs) __swiotlb_get_sgtable() argument
439 __dummy_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) __dummy_mmap() argument
634 __iommu_mmap_attrs(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) __iommu_mmap_attrs() argument
654 __iommu_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) __iommu_get_sgtable() argument
/linux-4.4.14/drivers/of/
of_pci.c
215 range.cpu_addr, range.cpu_addr + range.size - 1, of_pci_get_host_bridge_resources()
222 if (range.cpu_addr == OF_BAD_ADDR || range.size == 0) of_pci_get_host_bridge_resources()
247 *io_base = range.cpu_addr; of_pci_get_host_bridge_resources()
address.c
266 range->cpu_addr = of_translate_address(parser->node, of_pci_range_parser_one()
275 u64 pci_addr, cpu_addr, size; of_pci_range_parser_one() local
280 cpu_addr = of_translate_address(parser->node, of_pci_range_parser_one()
287 cpu_addr != range->cpu_addr + range->size) of_pci_range_parser_one()
323 err = pci_register_io_range(range->cpu_addr, range->size); of_pci_range_to_resource()
326 port = pci_address_to_pio(range->cpu_addr); of_pci_range_to_resource()
334 upper_32_bits(range->cpu_addr)) { of_pci_range_to_resource()
339 res->start = range->cpu_addr; of_pci_range_to_resource()
997 pr_debug("dma_addr(%llx) cpu_addr(%llx) size(%llx)\n", of_dma_get_range()
/linux-4.4.14/arch/ia64/sn/pci/
pci_dma.c
134 * @cpu_addr: kernel virtual address to free
140 static void sn_dma_free_coherent(struct device *dev, size_t size, void *cpu_addr, sn_dma_free_coherent() argument
149 free_pages((unsigned long)cpu_addr, get_order(size)); sn_dma_free_coherent()
155 * @cpu_addr: kernel virtual address of the region to map
160 * Map the region pointed to by @cpu_addr for DMA and return the
181 void *cpu_addr = page_address(page) + offset; sn_dma_map_page() local
192 phys_addr = __pa(cpu_addr); sn_dma_map_page()
/linux-4.4.14/arch/powerpc/mm/
dma-noncoherent.c
406 unsigned long __dma_get_coherent_pfn(unsigned long cpu_addr) __dma_get_coherent_pfn() argument
412 pgd_t *pgd = pgd_offset_k(cpu_addr); __dma_get_coherent_pfn()
413 pud_t *pud = pud_offset(pgd, cpu_addr); __dma_get_coherent_pfn()
414 pmd_t *pmd = pmd_offset(pud, cpu_addr); __dma_get_coherent_pfn()
415 pte_t *ptep = pte_offset_kernel(pmd, cpu_addr); __dma_get_coherent_pfn()
/linux-4.4.14/drivers/media/pci/smipcie/
smipcie-main.c
323 port->cpu_addr[0], (finishedData / 188)); smi_dma_xfer()
325 port->cpu_addr[0], finishedData);*/ smi_dma_xfer()
346 port->cpu_addr[1], (finishedData / 188)); smi_dma_xfer()
348 port->cpu_addr[1], finishedData);*/ smi_dma_xfer()
362 if (port->cpu_addr[0]) { smi_port_dma_free()
364 port->cpu_addr[0], port->dma_addr[0]); smi_port_dma_free()
365 port->cpu_addr[0] = NULL; smi_port_dma_free()
367 if (port->cpu_addr[1]) { smi_port_dma_free()
369 port->cpu_addr[1], port->dma_addr[1]); smi_port_dma_free()
370 port->cpu_addr[1] = NULL; smi_port_dma_free()
410 port->cpu_addr[0] = pci_alloc_consistent(port->dev->pci_dev, smi_port_init()
413 if (!port->cpu_addr[0]) { smi_port_init()
422 port->cpu_addr[1] = pci_alloc_consistent(port->dev->pci_dev, smi_port_init()
425 if (!port->cpu_addr[1]) { smi_port_init()
smipcie.h
265 u8 *cpu_addr[2]; member in struct:smi_port
/linux-4.4.14/arch/microblaze/pci/
pci-common.c
574 pr_debug("cpu_addr:0x%016llx size:0x%016llx\n", pci_process_bridge_OF_ranges()
575 range.cpu_addr, range.size); pci_process_bridge_OF_ranges()
582 if (range.cpu_addr == OF_BAD_ADDR || range.size == 0) pci_process_bridge_OF_ranges()
590 range.cpu_addr, range.cpu_addr + range.size - 1, pci_process_bridge_OF_ranges()
603 hose->io_base_virt = ioremap(range.cpu_addr, pci_process_bridge_OF_ranges()
614 hose->io_base_phys = range.cpu_addr - range.pci_addr; pci_process_bridge_OF_ranges()
618 range.cpu_addr = range.pci_addr; pci_process_bridge_OF_ranges()
623 range.cpu_addr, range.cpu_addr + range.size - 1, pci_process_bridge_OF_ranges()
635 isa_mb = range.cpu_addr; pci_process_bridge_OF_ranges()
638 isa_mem_base = range.cpu_addr; pci_process_bridge_OF_ranges()
639 hose->isa_mem_phys = range.cpu_addr; pci_process_bridge_OF_ranges()
650 hose->pci_mem_offset = range.cpu_addr - pci_process_bridge_OF_ranges()
653 hose->pci_mem_offset != range.cpu_addr - pci_process_bridge_OF_ranges()
666 res->start = range.cpu_addr; pci_process_bridge_OF_ranges()
667 res->end = range.cpu_addr + range.size - 1; pci_process_bridge_OF_ranges()
/linux-4.4.14/drivers/isdn/hardware/eicon/
divasmain.c
358 void *cpu_addr; diva_init_dma_map() local
362 if (!(cpu_addr = diva_pci_alloc_consistent(pdev, diva_init_dma_map()
369 diva_init_dma_map_entry(pmap, i, cpu_addr, diva_init_dma_map()
373 i, (unsigned long) cpu_addr, diva_init_dma_map()
390 void *cpu_addr; diva_free_dma_map() local
395 diva_get_dma_map_entry(pmap, i, &cpu_addr, &phys_addr); diva_free_dma_map()
396 if (!cpu_addr) { diva_free_dma_map()
404 (unsigned long) cpu_addr, (dword) dma_handle, diva_free_dma_map()
/linux-4.4.14/arch/powerpc/include/asm/
dma-mapping.h
34 void *cpu_addr, dma_addr_t handle,
52 extern unsigned long __dma_get_coherent_pfn(unsigned long cpu_addr);
/linux-4.4.14/arch/s390/hypfs/
hypfs_diag.c
175 __u16 cpu_addr; member in struct:cpu_info
185 __u16 cpu_addr; member in struct:x_cpu_info
224 return ((struct cpu_info *)hdr)->cpu_addr; cpu_info__cpu_addr()
226 return ((struct x_cpu_info *)hdr)->cpu_addr; cpu_info__cpu_addr()
289 __u16 cpu_addr; member in struct:phys_cpu
298 __u16 cpu_addr; member in struct:x_phys_cpu
317 return ((struct phys_cpu *)hdr)->cpu_addr; phys_cpu__cpu_addr()
319 return ((struct x_phys_cpu *)hdr)->cpu_addr; phys_cpu__cpu_addr()
/linux-4.4.14/arch/mn10300/include/asm/
dma-mapping.h
173 struct vm_area_struct *vma, void *cpu_addr, dma_mmap_coherent()
180 void *cpu_addr, dma_addr_t dma_addr, dma_get_sgtable()
172 dma_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_coherent() argument
179 dma_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_get_sgtable() argument
/linux-4.4.14/arch/nios2/include/asm/
dma-mapping.h
132 void *cpu_addr, dma_addr_t dma_addr, size_t size);
134 void *cpu_addr, dma_addr_t dma_addr,
/linux-4.4.14/arch/blackfin/include/asm/
dma-mapping.h
159 void *cpu_addr, dma_addr_t dma_addr, size_t size);
161 void *cpu_addr, dma_addr_t dma_addr,
/linux-4.4.14/arch/powerpc/kernel/
dma.c
179 void *cpu_addr, dma_addr_t handle, size_t size, dma_direct_mmap_coherent()
186 pfn = __dma_get_coherent_pfn((unsigned long)cpu_addr); dma_direct_mmap_coherent()
188 pfn = page_to_pfn(virt_to_page(cpu_addr)); dma_direct_mmap_coherent()
178 dma_direct_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t handle, size_t size, struct dma_attrs *attrs) dma_direct_mmap_coherent() argument
pci-common.c
694 if (range.cpu_addr == OF_BAD_ADDR || range.size == 0) pci_process_bridge_OF_ranges()
703 range.cpu_addr, range.cpu_addr + range.size - 1, pci_process_bridge_OF_ranges()
718 hose->io_base_virt = ioremap(range.cpu_addr, pci_process_bridge_OF_ranges()
730 hose->io_base_phys = range.cpu_addr - range.pci_addr; pci_process_bridge_OF_ranges()
734 range.cpu_addr = range.pci_addr; pci_process_bridge_OF_ranges()
739 range.cpu_addr, range.cpu_addr + range.size - 1, pci_process_bridge_OF_ranges()
753 isa_mem_base = range.cpu_addr; pci_process_bridge_OF_ranges()
754 hose->isa_mem_phys = range.cpu_addr; pci_process_bridge_OF_ranges()
759 hose->mem_offset[memno] = range.cpu_addr - pci_process_bridge_OF_ranges()
767 res->start = range.cpu_addr; pci_process_bridge_OF_ranges()
768 res->end = range.cpu_addr + range.size - 1; pci_process_bridge_OF_ranges()
/linux-4.4.14/drivers/gpu/drm/radeon/
radeon_fence.c
66 if (drv->cpu_addr) { radeon_fence_write()
67 *drv->cpu_addr = cpu_to_le32(seq); radeon_fence_write()
89 if (drv->cpu_addr) { radeon_fence_read()
90 seq = le32_to_cpu(*drv->cpu_addr); radeon_fence_read()
811 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; radeon_fence_driver_start_ring()
818 rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index; radeon_fence_driver_start_ring()
831 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; radeon_fence_driver_start_ring()
837 ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr); radeon_fence_driver_start_ring()
856 rdev->fence_drv[ring].cpu_addr = NULL; radeon_fence_driver_init_ring()
radeon_uvd.c
167 r = radeon_bo_kmap(rdev->uvd.vcpu_bo, &rdev->uvd.cpu_addr); radeon_uvd_init()
245 memcpy(rdev->uvd.cpu_addr, rdev->uvd_fw->data, rdev->uvd_fw->size); radeon_uvd_resume()
250 ptr = rdev->uvd.cpu_addr; radeon_uvd_resume()
735 uint32_t *msg = rdev->uvd.cpu_addr + offs; radeon_uvd_get_create_msg()
771 uint32_t *msg = rdev->uvd.cpu_addr + offs; radeon_uvd_get_destroy_msg()
radeon_vce.c
223 void *cpu_addr; radeon_vce_resume() local
235 r = radeon_bo_kmap(rdev->vce.vcpu_bo, &cpu_addr); radeon_vce_resume()
242 memset(cpu_addr, 0, radeon_bo_size(rdev->vce.vcpu_bo)); radeon_vce_resume()
244 r = vce_v1_0_load_fw(rdev, cpu_addr); radeon_vce_resume()
246 memcpy(cpu_addr, rdev->vce_fw->data, rdev->vce_fw->size); radeon_vce_resume()
uvd_v1_0.c
144 WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr)); uvd_v1_0_resume()
radeon.h
358 volatile uint32_t *cpu_addr; member in struct:radeon_fence_driver
1678 void *cpu_addr; member in struct:radeon_uvd
/linux-4.4.14/arch/mips/mm/
dma-default.c
201 void *cpu_addr, dma_addr_t dma_addr, size_t size, mips_dma_mmap()
206 unsigned long addr = (unsigned long)cpu_addr; mips_dma_mmap()
221 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) mips_dma_mmap()
200 mips_dma_mmap(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size, struct dma_attrs *attrs) mips_dma_mmap() argument
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
amdgpu_fence.c
65 if (drv->cpu_addr) amdgpu_fence_write()
66 *drv->cpu_addr = cpu_to_le32(seq); amdgpu_fence_write()
82 if (drv->cpu_addr) amdgpu_fence_read()
83 seq = le32_to_cpu(*drv->cpu_addr); amdgpu_fence_read()
442 ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs]; amdgpu_fence_driver_start_ring()
447 ring->fence_drv.cpu_addr = adev->uvd.cpu_addr + index; amdgpu_fence_driver_start_ring()
459 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr); amdgpu_fence_driver_start_ring()
476 ring->fence_drv.cpu_addr = NULL; amdgpu_fence_driver_init_ring()
amdgpu_uvd.c
189 r = amdgpu_bo_kmap(adev->uvd.vcpu_bo, &adev->uvd.cpu_addr); amdgpu_uvd_sw_init()
276 memcpy(adev->uvd.cpu_addr, (adev->uvd.fw->data) + offset, amdgpu_uvd_resume()
283 ptr = adev->uvd.cpu_addr; amdgpu_uvd_resume()
amdgpu_vce.c
236 void *cpu_addr; amdgpu_vce_resume() local
250 r = amdgpu_bo_kmap(adev->vce.vcpu_bo, &cpu_addr); amdgpu_vce_resume()
259 memcpy(cpu_addr, (adev->vce.fw->data) + offset, amdgpu_vce_resume()
amdgpu.h
393 volatile uint32_t *cpu_addr; member in struct:amdgpu_fence_driver
1674 void *cpu_addr; member in struct:amdgpu_uvd
/linux-4.4.14/include/media/
saa7146_vv.h
14 dev->d_rps0.cpu_addr[ count++ ] = cpu_to_le32(x); \
18 dev->d_rps1.cpu_addr[ count++ ] = cpu_to_le32(x); \
saa7146.h
113 __le32 *cpu_addr; member in struct:saa7146_dma
/linux-4.4.14/arch/parisc/include/asm/
dma-mapping.h
246 struct vm_area_struct *vma, void *cpu_addr, dma_mmap_coherent()
253 void *cpu_addr, dma_addr_t dma_addr, dma_get_sgtable()
245 dma_mmap_coherent(struct device *dev, struct vm_area_struct *vma, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_mmap_coherent() argument
252 dma_get_sgtable(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_addr_t dma_addr, size_t size) dma_get_sgtable() argument
/linux-4.4.14/drivers/crypto/
img-hash.c
123 void __iomem *cpu_addr; member in struct:img_hash_dev
203 writel_relaxed(buffer[count], hdev->cpu_addr); img_hash_xmit_cpu()
915 hdev->cpu_addr = devm_ioremap_resource(dev, hash_res); img_hash_probe()
916 if (IS_ERR(hdev->cpu_addr)) { img_hash_probe()
918 err = PTR_ERR(hdev->cpu_addr); img_hash_probe()
/linux-4.4.14/drivers/scsi/
3w-9xxx.c
524 unsigned long *cpu_addr; twa_allocate_memory() local
527 cpu_addr = pci_alloc_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, &dma_handle); twa_allocate_memory()
528 if (!cpu_addr) { twa_allocate_memory()
533 if ((unsigned long)cpu_addr % (TW_ALIGNMENT_9000)) { twa_allocate_memory()
535 pci_free_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, cpu_addr, dma_handle); twa_allocate_memory()
539 memset(cpu_addr, 0, size*TW_Q_LENGTH); twa_allocate_memory()
545 tw_dev->command_packet_virt[i] = (TW_Command_Full *)((unsigned char *)cpu_addr + (i*size)); twa_allocate_memory()
549 tw_dev->generic_buffer_virt[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size)); twa_allocate_memory()
643 unsigned long *cpu_addr, data_buffer_length_adjusted = 0, flags = 0; twa_chrdev_ioctl() local
682 cpu_addr = dma_alloc_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_Ioctl_Buf_Apache) - 1, &dma_handle, GFP_KERNEL); twa_chrdev_ioctl()
683 if (!cpu_addr) { twa_chrdev_ioctl()
688 tw_ioctl = (TW_Ioctl_Buf_Apache *)cpu_addr; twa_chrdev_ioctl()
877 dma_free_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_Ioctl_Buf_Apache) - 1, cpu_addr, dma_handle); twa_chrdev_ioctl()
3w-sas.c
650 unsigned long *cpu_addr; twl_allocate_memory() local
653 cpu_addr = pci_zalloc_consistent(tw_dev->tw_pci_dev, size * TW_Q_LENGTH, twl_allocate_memory()
655 if (!cpu_addr) { twl_allocate_memory()
664 tw_dev->command_packet_virt[i] = (TW_Command_Full *)((unsigned char *)cpu_addr + (i*size)); twl_allocate_memory()
668 tw_dev->generic_buffer_virt[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size)); twl_allocate_memory()
672 tw_dev->sense_buffer_virt[i] = (TW_Command_Apache_Header *)((unsigned char *)cpu_addr + (i*size)); twl_allocate_memory()
722 unsigned long *cpu_addr, data_buffer_length_adjusted = 0, flags = 0; twl_chrdev_ioctl() local
755 cpu_addr = dma_alloc_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_Ioctl_Buf_Apache) - 1, &dma_handle, GFP_KERNEL); twl_chrdev_ioctl()
756 if (!cpu_addr) { twl_chrdev_ioctl()
761 tw_ioctl = (TW_Ioctl_Buf_Apache *)cpu_addr; twl_chrdev_ioctl()
826 dma_free_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_Ioctl_Buf_Apache) - 1, cpu_addr, dma_handle); twl_chrdev_ioctl()
3w-xxxx.c
834 unsigned long *cpu_addr = NULL; tw_allocate_memory() local
838 cpu_addr = pci_alloc_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, &dma_handle); tw_allocate_memory()
839 if (cpu_addr == NULL) { tw_allocate_memory()
844 if ((unsigned long)cpu_addr % (tw_dev->tw_pci_dev->device == TW_DEVICE_ID ? TW_ALIGNMENT_6000 : TW_ALIGNMENT_7000)) { tw_allocate_memory()
846 pci_free_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, cpu_addr, dma_handle); tw_allocate_memory()
850 memset(cpu_addr, 0, size*TW_Q_LENGTH); tw_allocate_memory()
856 tw_dev->command_packet_virtual_address[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size)); tw_allocate_memory()
860 tw_dev->alignment_virtual_address[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size)); tw_allocate_memory()
881 unsigned long *cpu_addr; tw_chrdev_ioctl() local
912 cpu_addr = dma_alloc_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_New_Ioctl) - 1, &dma_handle, GFP_KERNEL); tw_chrdev_ioctl()
913 if (cpu_addr == NULL) { tw_chrdev_ioctl()
918 tw_ioctl = (TW_New_Ioctl *)cpu_addr; tw_chrdev_ioctl()
1022 dma_free_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_New_Ioctl) - 1, cpu_addr, dma_handle); tw_chrdev_ioctl()
/linux-4.4.14/drivers/s390/cio/
css.h
54 __u32 cpu_addr : 16; /* CPU address */
css.c
705 css->global_pgid.pgid_high.cpu_addr = stap(); css_generate_pgid()
/linux-4.4.14/drivers/net/caif/
caif_spi.c
87 static inline void dma_free(void *cpu_addr, dma_addr_t handle) dma_free() argument
89 kfree(cpu_addr); dma_free()
100 static inline void dma_free(void *cpu_addr, dma_addr_t handle) dma_free() argument
102 dma_free_coherent(NULL, SPI_DMA_BUF_LEN, cpu_addr, handle); dma_free()
/linux-4.4.14/include/rdma/
ib_verbs.h
1566 size_t size, void *cpu_addr,
2604 * @cpu_addr: The kernel virtual address
2609 void *cpu_addr, size_t size, ib_dma_map_single()
2613 return dev->dma_ops->map_single(dev, cpu_addr, size, direction); ib_dma_map_single()
2614 return dma_map_single(dev->dma_device, cpu_addr, size, direction); ib_dma_map_single()
2635 void *cpu_addr, size_t size, ib_dma_map_single_attrs()
2639 return dma_map_single_attrs(dev->dma_device, cpu_addr, size, ib_dma_map_single_attrs()
2828 * @cpu_addr: the address returned by ib_dma_alloc_coherent()
2832 size_t size, void *cpu_addr, ib_dma_free_coherent()
2836 dev->dma_ops->free_coherent(dev, size, cpu_addr, dma_handle); ib_dma_free_coherent()
2838 dma_free_coherent(dev->dma_device, size, cpu_addr, dma_handle); ib_dma_free_coherent()
2608 ib_dma_map_single(struct ib_device *dev, void *cpu_addr, size_t size, enum dma_data_direction direction) ib_dma_map_single() argument
2634 ib_dma_map_single_attrs(struct ib_device *dev, void *cpu_addr, size_t size, enum dma_data_direction direction, struct dma_attrs *attrs) ib_dma_map_single_attrs() argument
2831 ib_dma_free_coherent(struct ib_device *dev, size_t size, void *cpu_addr, u64 dma_handle) ib_dma_free_coherent() argument
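Note: the ib_verbs.h hits show the RDMA wrappers around the same idea: ib_dma_alloc_coherent() returns the cpu_addr and fills in a u64 dma_handle, and ib_dma_free_coherent() takes both back. A minimal sketch, assuming the 4.4-era signatures listed above and using a hypothetical example_ib_buffer() helper:

    #include <linux/errno.h>
    #include <rdma/ib_verbs.h>

    static int example_ib_buffer(struct ib_device *ibdev, size_t size)
    {
            u64 dma_handle;
            void *cpu_addr;

            /* coherent buffer: CPU pointer plus 64-bit DMA handle */
            cpu_addr = ib_dma_alloc_coherent(ibdev, size, &dma_handle,
                                             GFP_KERNEL);
            if (!cpu_addr)
                    return -ENOMEM;

            /* ... post work requests that reference dma_handle ... */

            ib_dma_free_coherent(ibdev, size, cpu_addr, dma_handle);
            return 0;
    }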
/linux-4.4.14/arch/arm/mach-omap2/
common.h
268 extern void omap_auxcoreboot_addr(u32 cpu_addr);
/linux-4.4.14/drivers/staging/rdma/ehca/
ehca_mrmw.c
2476 static u64 ehca_dma_map_single(struct ib_device *dev, void *cpu_addr, ehca_dma_map_single() argument
2479 if (cpu_addr) ehca_dma_map_single()
2480 return ehca_map_vaddr(cpu_addr); ehca_dma_map_single()
2574 void *cpu_addr, u64 dma_handle) ehca_dma_free_coherent()
2576 if (cpu_addr && size) ehca_dma_free_coherent()
2577 free_pages((unsigned long)cpu_addr, get_order(size)); ehca_dma_free_coherent()
2573 ehca_dma_free_coherent(struct ib_device *dev, size_t size, void *cpu_addr, u64 dma_handle) ehca_dma_free_coherent() argument
/linux-4.4.14/arch/sparc/kernel/
ioport.c
477 * cpu_addr is what was returned from pci_alloc_consistent,
481 * References to the memory and mappings associated with cpu_addr/dma_addr
/linux-4.4.14/drivers/tty/serial/
msm_serial.c
316 void *cpu_addr; msm_handle_tx_dma() local
320 cpu_addr = &xmit->buf[xmit->tail]; msm_handle_tx_dma()
322 dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir); msm_handle_tx_dma()
/linux-4.4.14/drivers/rpmsg/
virtio_rpmsg_bus.c
1008 void *cpu_addr = vrp->rbufs + i * RPMSG_BUF_SIZE; rpmsg_probe() local
1010 sg_init_one(&sg, cpu_addr, RPMSG_BUF_SIZE); rpmsg_probe()
1012 err = virtqueue_add_inbuf(vrp->rvq, &sg, 1, cpu_addr, rpmsg_probe()
/linux-4.4.14/arch/powerpc/platforms/cell/
iommu.c
903 u64 cpu_addr, size, best_size, dev_addr = OF_BAD_ADDR; cell_iommu_get_fixed_address() local
939 cpu_addr = of_translate_dma_address(np, ranges + i + naddr); cell_iommu_get_fixed_address()
942 if (cpu_addr == 0 && size > best_size) { cell_iommu_get_fixed_address()
/linux-4.4.14/arch/powerpc/sysdev/
ppc4xx_pci.c
116 u64 cpu_addr = of_translate_dma_address(hose->dn, ranges + 3); ppc4xx_parse_dma_ranges() local
119 if (cpu_addr == OF_BAD_ADDR || size == 0) ppc4xx_parse_dma_ranges()
129 if (cpu_addr != 0 || pci_addr > 0xffffffff) { ppc4xx_parse_dma_ranges()
133 pci_addr, pci_addr + size - 1, cpu_addr); ppc4xx_parse_dma_ranges()
/linux-4.4.14/drivers/gpu/drm/amd/amdkfd/
kfd_device.c
500 pr_debug("kfd: gpu_addr = %p, cpu_addr = %p\n", kfd_gtt_sa_allocate()
/linux-4.4.14/drivers/parisc/
ccio-dma.c
864 * @cpu_addr: The cpu address returned from the ccio_alloc_consistent.
870 ccio_free_consistent(struct device *dev, size_t size, void *cpu_addr, ccio_free_consistent() argument
874 free_pages((unsigned long)cpu_addr, get_order(size)); ccio_free_consistent()
/linux-4.4.14/drivers/block/
DAC960.c
223 void *cpu_addr; init_dma_loaf() local
226 cpu_addr = pci_alloc_consistent(dev, len, &dma_handle); init_dma_loaf()
227 if (cpu_addr == NULL) init_dma_loaf()
230 loaf->cpu_free = loaf->cpu_base = cpu_addr; init_dma_loaf()
233 memset(cpu_addr, 0, len); init_dma_loaf()
241 void *cpu_addr = loaf->cpu_free; slice_dma_loaf() local
247 return cpu_addr; slice_dma_loaf()
/linux-4.4.14/drivers/net/ethernet/sun/
niu.c
9595 void *cpu_addr, u64 handle) niu_pci_free_coherent()
9597 dma_free_coherent(dev, size, cpu_addr, handle); niu_pci_free_coherent()
9613 static u64 niu_pci_map_single(struct device *dev, void *cpu_addr, niu_pci_map_single() argument
9617 return dma_map_single(dev, cpu_addr, size, direction); niu_pci_map_single()
9973 void *cpu_addr, u64 handle) niu_phys_free_coherent()
9977 free_pages((unsigned long) cpu_addr, order); niu_phys_free_coherent()
9993 static u64 niu_phys_map_single(struct device *dev, void *cpu_addr, niu_phys_map_single() argument
9997 return __pa(cpu_addr); niu_phys_map_single()
9594 niu_pci_free_coherent(struct device *dev, size_t size, void *cpu_addr, u64 handle) niu_pci_free_coherent() argument
9972 niu_phys_free_coherent(struct device *dev, size_t size, void *cpu_addr, u64 handle) niu_phys_free_coherent() argument
niu.h
3131 void *cpu_addr, u64 handle);
3137 u64 (*map_single)(struct device *dev, void *cpu_addr,

Completed in 3766 milliseconds