dma               124 arch/arm/common/sa1111.c 	bool		dma;
dma               133 arch/arm/common/sa1111.c 		.dma		= true,
dma               147 arch/arm/common/sa1111.c 		.dma		= true,
dma               764 arch/arm/common/sa1111.c 	if (info->dma && sachip->dev->dma_mask) {
dma               158 arch/arm/include/asm/ecard.h 	CONST unsigned int	dma;		/* DMA number (for request_dma)	*/
dma                46 arch/arm/include/asm/mach/dma.h extern int isa_dma_add(unsigned int, dma_t *dma);
dma                44 arch/arm/kernel/dma-isa.c static int isa_get_dma_residue(unsigned int chan, dma_t *dma)
dma                61 arch/arm/kernel/dma-isa.c static void isa_enable_dma(unsigned int chan, dma_t *dma)
dma                63 arch/arm/kernel/dma-isa.c 	if (dma->invalid) {
dma                68 arch/arm/kernel/dma-isa.c 		mode = (chan & 3) | dma->dma_mode;
dma                69 arch/arm/kernel/dma-isa.c 		switch (dma->dma_mode & DMA_MODE_MASK) {
dma                87 arch/arm/kernel/dma-isa.c 		if (!dma->sg) {
dma                92 arch/arm/kernel/dma-isa.c 			dma->sg = &dma->buf;
dma                93 arch/arm/kernel/dma-isa.c 			dma->sgcount = 1;
dma                94 arch/arm/kernel/dma-isa.c 			dma->buf.length = dma->count;
dma                95 arch/arm/kernel/dma-isa.c 			dma->buf.dma_address = dma_map_single(&isa_dma_dev,
dma                96 arch/arm/kernel/dma-isa.c 				dma->addr, dma->count,
dma               100 arch/arm/kernel/dma-isa.c 		address = dma->buf.dma_address;
dma               101 arch/arm/kernel/dma-isa.c 		length  = dma->buf.length - 1;
dma               120 arch/arm/kernel/dma-isa.c 		dma->invalid = 0;
dma               125 arch/arm/kernel/dma-isa.c static void isa_disable_dma(unsigned int chan, dma_t *dma)
dma                36 arch/arm/kernel/dma.c int __init isa_dma_add(unsigned int chan, dma_t *dma)
dma                38 arch/arm/kernel/dma.c 	if (!dma->d_ops)
dma                41 arch/arm/kernel/dma.c 	sg_init_table(&dma->buf, 1);
dma                45 arch/arm/kernel/dma.c 	dma_chan[chan] = dma;
dma                56 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma                59 arch/arm/kernel/dma.c 	if (!dma)
dma                62 arch/arm/kernel/dma.c 	if (xchg(&dma->lock, 1) != 0)
dma                65 arch/arm/kernel/dma.c 	dma->device_id = device_id;
dma                66 arch/arm/kernel/dma.c 	dma->active    = 0;
dma                67 arch/arm/kernel/dma.c 	dma->invalid   = 1;
dma                70 arch/arm/kernel/dma.c 	if (dma->d_ops->request)
dma                71 arch/arm/kernel/dma.c 		ret = dma->d_ops->request(chan, dma);
dma                74 arch/arm/kernel/dma.c 		xchg(&dma->lock, 0);
dma                94 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma                96 arch/arm/kernel/dma.c 	if (!dma)
dma                99 arch/arm/kernel/dma.c 	if (dma->active) {
dma               101 arch/arm/kernel/dma.c 		dma->d_ops->disable(chan, dma);
dma               102 arch/arm/kernel/dma.c 		dma->active = 0;
dma               105 arch/arm/kernel/dma.c 	if (xchg(&dma->lock, 0) != 0) {
dma               106 arch/arm/kernel/dma.c 		if (dma->d_ops->free)
dma               107 arch/arm/kernel/dma.c 			dma->d_ops->free(chan, dma);
dma               123 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               125 arch/arm/kernel/dma.c 	if (dma->active)
dma               128 arch/arm/kernel/dma.c 	dma->sg = sg;
dma               129 arch/arm/kernel/dma.c 	dma->sgcount = nr_sg;
dma               130 arch/arm/kernel/dma.c 	dma->invalid = 1;
dma               140 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               142 arch/arm/kernel/dma.c 	if (dma->active)
dma               145 arch/arm/kernel/dma.c 	dma->sg = NULL;
dma               146 arch/arm/kernel/dma.c 	dma->addr = addr;
dma               147 arch/arm/kernel/dma.c 	dma->invalid = 1;
dma               157 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               159 arch/arm/kernel/dma.c 	if (dma->active)
dma               162 arch/arm/kernel/dma.c 	dma->sg = NULL;
dma               163 arch/arm/kernel/dma.c 	dma->count = count;
dma               164 arch/arm/kernel/dma.c 	dma->invalid = 1;
dma               172 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               174 arch/arm/kernel/dma.c 	if (dma->active)
dma               177 arch/arm/kernel/dma.c 	dma->dma_mode = mode;
dma               178 arch/arm/kernel/dma.c 	dma->invalid = 1;
dma               186 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               188 arch/arm/kernel/dma.c 	if (!dma->lock)
dma               191 arch/arm/kernel/dma.c 	if (dma->active == 0) {
dma               192 arch/arm/kernel/dma.c 		dma->active = 1;
dma               193 arch/arm/kernel/dma.c 		dma->d_ops->enable(chan, dma);
dma               207 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               209 arch/arm/kernel/dma.c 	if (!dma->lock)
dma               212 arch/arm/kernel/dma.c 	if (dma->active == 1) {
dma               213 arch/arm/kernel/dma.c 		dma->active = 0;
dma               214 arch/arm/kernel/dma.c 		dma->d_ops->disable(chan, dma);
dma               229 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               230 arch/arm/kernel/dma.c 	return dma->active;
dma               242 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               245 arch/arm/kernel/dma.c 	if (dma->d_ops->setspeed)
dma               246 arch/arm/kernel/dma.c 		ret = dma->d_ops->setspeed(chan, dma, cycle_ns);
dma               247 arch/arm/kernel/dma.c 	dma->speed = ret;
dma               253 arch/arm/kernel/dma.c 	dma_t *dma = dma_channel(chan);
dma               256 arch/arm/kernel/dma.c 	if (dma->d_ops->residue)
dma               257 arch/arm/kernel/dma.c 		ret = dma->d_ops->residue(chan, dma);
dma               269 arch/arm/kernel/dma.c 		dma_t *dma = dma_channel(i);
dma               270 arch/arm/kernel/dma.c 		if (dma && dma->lock)
dma               271 arch/arm/kernel/dma.c 			seq_printf(m, "%2d: %s\n", i, dma->device_id);
dma                22 arch/arm/mach-davinci/sram.c void *sram_alloc(size_t len, dma_addr_t *dma)
dma                26 arch/arm/mach-davinci/sram.c 	if (dma)
dma                27 arch/arm/mach-davinci/sram.c 		*dma = 0;
dma                28 arch/arm/mach-davinci/sram.c 	if (!sram_pool || (dma && !dma_base))
dma                31 arch/arm/mach-davinci/sram.c 	return gen_pool_dma_alloc(sram_pool, len, dma);
dma                21 arch/arm/mach-davinci/sram.h extern void *sram_alloc(size_t len, dma_addr_t *dma);
dma                25 arch/arm/mach-footbridge/dma.c static int fb_dma_request(unsigned int chan, dma_t *dma)
dma                30 arch/arm/mach-footbridge/dma.c static void fb_dma_enable(unsigned int chan, dma_t *dma)
dma                34 arch/arm/mach-footbridge/dma.c static void fb_dma_disable(unsigned int chan, dma_t *dma)
dma                49 arch/arm/mach-footbridge/dma.c 	dma[_DC21285_DMA(0)].d_ops = &fb_dma_ops;
dma                50 arch/arm/mach-footbridge/dma.c 	dma[_DC21285_DMA(1)].d_ops = &fb_dma_ops;
dma               485 arch/arm/mach-footbridge/netwinder-hw.c static inline void rwa010_waveartist_init(int base, int irq, int dma)
dma               501 arch/arm/mach-footbridge/netwinder-hw.c 	WRITE_RWA(0x74, dma);
dma               502 arch/arm/mach-footbridge/netwinder-hw.c 	dprintk(" dma: %d (%d)\n", inb(0x203), dma);
dma               507 arch/arm/mach-footbridge/netwinder-hw.c static inline void rwa010_soundblaster_init(int sb_base, int al_base, int irq, int dma)
dma               525 arch/arm/mach-footbridge/netwinder-hw.c 	WRITE_RWA(0x74, dma);
dma               526 arch/arm/mach-footbridge/netwinder-hw.c 	dprintk("%d (%d)\n", inb(0x203), dma);
dma                73 arch/arm/mach-imx/devices/platform-imx-ssi.c 	.start = data->dma ## _name,					\
dma                74 arch/arm/mach-imx/devices/platform-imx-ssi.c 	.end = data->dma ## _name,					\
dma                47 arch/arm/mach-mmp/devices.c 		if (desc->dma[i] == 0)
dma                50 arch/arm/mach-mmp/devices.c 		res[nres].start	= desc->dma[i];
dma                51 arch/arm/mach-mmp/devices.c 		res[nres].end	= desc->dma[i];
dma                17 arch/arm/mach-mmp/devices.h 	int		dma[MAX_RESOURCE_DMA];
dma                28 arch/arm/mach-mmp/devices.h 	.dma		= { _dma },					\
dma                39 arch/arm/mach-mmp/devices.h 	.dma		= { _dma },					\
dma                50 arch/arm/mach-mmp/devices.h 	.dma		= { _dma },					\
dma                26 arch/arm/mach-rpc/dma.c 	struct dma_struct	dma;
dma                58 arch/arm/mach-rpc/dma.c 	if (idma->dma.sg) {
dma                76 arch/arm/mach-rpc/dma.c 			if (idma->dma.sgcount > 1) {
dma                77 arch/arm/mach-rpc/dma.c 				idma->dma.sg = sg_next(idma->dma.sg);
dma                78 arch/arm/mach-rpc/dma.c 				idma->dma_addr = idma->dma.sg->dma_address;
dma                79 arch/arm/mach-rpc/dma.c 				idma->dma_len = idma->dma.sg->length;
dma                80 arch/arm/mach-rpc/dma.c 				idma->dma.sgcount--;
dma                82 arch/arm/mach-rpc/dma.c 				idma->dma.sg = NULL;
dma               134 arch/arm/mach-rpc/dma.c static int iomd_request_dma(unsigned int chan, dma_t *dma)
dma               136 arch/arm/mach-rpc/dma.c 	struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma);
dma               139 arch/arm/mach-rpc/dma.c 			   0, idma->dma.device_id, idma);
dma               142 arch/arm/mach-rpc/dma.c static void iomd_free_dma(unsigned int chan, dma_t *dma)
dma               144 arch/arm/mach-rpc/dma.c 	struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma);
dma               155 arch/arm/mach-rpc/dma.c static void iomd_enable_dma(unsigned int chan, dma_t *dma)
dma               157 arch/arm/mach-rpc/dma.c 	struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma);
dma               161 arch/arm/mach-rpc/dma.c 	if (idma->dma.invalid) {
dma               162 arch/arm/mach-rpc/dma.c 		idma->dma.invalid = 0;
dma               168 arch/arm/mach-rpc/dma.c 		if (!idma->dma.sg) {
dma               169 arch/arm/mach-rpc/dma.c 			idma->dma.sg = &idma->dma.buf;
dma               170 arch/arm/mach-rpc/dma.c 			idma->dma.sgcount = 1;
dma               171 arch/arm/mach-rpc/dma.c 			idma->dma.buf.length = idma->dma.count;
dma               172 arch/arm/mach-rpc/dma.c 			idma->dma.buf.dma_address = dma_map_single(&isa_dma_dev,
dma               173 arch/arm/mach-rpc/dma.c 				idma->dma.addr, idma->dma.count,
dma               174 arch/arm/mach-rpc/dma.c 				idma->dma.dma_mode == DMA_MODE_READ ?
dma               178 arch/arm/mach-rpc/dma.c 		idma->dma_addr = idma->dma.sg->dma_address;
dma               179 arch/arm/mach-rpc/dma.c 		idma->dma_len = idma->dma.sg->length;
dma               185 arch/arm/mach-rpc/dma.c 	if (idma->dma.dma_mode == DMA_MODE_READ)
dma               192 arch/arm/mach-rpc/dma.c static void iomd_disable_dma(unsigned int chan, dma_t *dma)
dma               194 arch/arm/mach-rpc/dma.c 	struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma);
dma               205 arch/arm/mach-rpc/dma.c static int iomd_set_dma_speed(unsigned int chan, dma_t *dma, int cycle)
dma               261 arch/arm/mach-rpc/dma.c 	struct dma_struct	dma;
dma               265 arch/arm/mach-rpc/dma.c static void floppy_enable_dma(unsigned int chan, dma_t *dma)
dma               267 arch/arm/mach-rpc/dma.c 	struct floppy_dma *fdma = container_of(dma, struct floppy_dma, dma);
dma               272 arch/arm/mach-rpc/dma.c 	if (fdma->dma.sg)
dma               275 arch/arm/mach-rpc/dma.c 	if (fdma->dma.dma_mode == DMA_MODE_READ) {
dma               285 arch/arm/mach-rpc/dma.c 	regs.ARM_r9  = fdma->dma.count;
dma               286 arch/arm/mach-rpc/dma.c 	regs.ARM_r10 = (unsigned long)fdma->dma.addr;
dma               299 arch/arm/mach-rpc/dma.c static void floppy_disable_dma(unsigned int chan, dma_t *dma)
dma               301 arch/arm/mach-rpc/dma.c 	struct floppy_dma *fdma = container_of(dma, struct floppy_dma, dma);
dma               306 arch/arm/mach-rpc/dma.c static int floppy_get_residue(unsigned int chan, dma_t *dma)
dma               323 arch/arm/mach-rpc/dma.c static void sound_enable_disable_dma(unsigned int chan, dma_t *dma)
dma               336 arch/arm/mach-rpc/dma.c 	.dma		= {
dma               378 arch/arm/mach-rpc/dma.c 		iomd_dma[i].dma.d_ops = &iomd_dma_ops;
dma               380 arch/arm/mach-rpc/dma.c 		ret = isa_dma_add(i, &iomd_dma[i].dma);
dma               385 arch/arm/mach-rpc/dma.c 	ret = isa_dma_add(DMA_VIRTUAL_FLOPPY, &floppy_dma.dma);
dma               705 arch/arm/mach-rpc/ecard.c 	ec->dma = NO_DMA;
dma               763 arch/arm/mach-rpc/ecard.c 	return sprintf(buf, "%u\n", ec->dma);
dma               765 arch/arm/mach-rpc/ecard.c static DEVICE_ATTR_RO(dma);
dma               963 arch/arm/mach-rpc/ecard.c 		ec->dma = 2 + slot;
dma              1662 arch/arm/mm/dma-mapping.c 	struct scatterlist *s = sg, *dma = sg, *start = sg;
dma              1675 arch/arm/mm/dma-mapping.c 			if (__map_sg_chunk(dev, start, size, &dma->dma_address,
dma              1679 arch/arm/mm/dma-mapping.c 			dma->dma_address += offset;
dma              1680 arch/arm/mm/dma-mapping.c 			dma->dma_length = size - offset;
dma              1684 arch/arm/mm/dma-mapping.c 			dma = sg_next(dma);
dma              1689 arch/arm/mm/dma-mapping.c 	if (__map_sg_chunk(dev, start, size, &dma->dma_address, dir, attrs,
dma              1693 arch/arm/mm/dma-mapping.c 	dma->dma_address += offset;
dma              1694 arch/arm/mm/dma-mapping.c 	dma->dma_length = size - offset;
dma               132 arch/m68k/include/asm/dvma.h #define DMA_ISBROKEN(dma)    ((dma)->revision == dvmarev1)
dma               133 arch/m68k/include/asm/dvma.h #define DMA_ISESC1(dma)      ((dma)->revision == dvmaesc1)
dma               215 arch/m68k/include/asm/dvma.h #define DMA_IRQ_ENTRY(dma, dregs) do { \
dma               216 arch/m68k/include/asm/dvma.h         if(DMA_ISBROKEN(dma)) DMA_INTSOFF(dregs); \
dma               219 arch/m68k/include/asm/dvma.h #define DMA_IRQ_EXIT(dma, dregs) do { \
dma               220 arch/m68k/include/asm/dvma.h 	if(DMA_ISBROKEN(dma)) DMA_INTSON(dregs); \
dma               224 arch/m68k/include/asm/dvma.h #define DMA_RESET(dma) do { \
dma               225 arch/m68k/include/asm/dvma.h 	struct sparc_dma_registers *regs = dma->regs;                      \
dma               234 arch/m68k/include/asm/dvma.h 	if(dma->revision>dvmarev1) regs->cond_reg |= DMA_3CLKS;            \
dma               235 arch/m68k/include/asm/dvma.h 	dma->running = 0;                                                  \
dma               351 arch/mips/include/asm/netlogic/xlr/fmn.h 	struct xlr_fmn_info dma;
dma               228 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t dma:1;
dma               234 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t dma:1;
dma               244 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t dma:1;
dma               250 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t dma:1;
dma                29 arch/mips/lantiq/xway/vmmc.c 	dma_addr_t dma;
dma                33 arch/mips/lantiq/xway/vmmc.c 						    &dma, GFP_KERNEL));
dma                93 arch/mips/netlogic/xlr/fmn-config.c 		total_credits += cfg->dma.credit_config[bkt];
dma               185 arch/mips/netlogic/xlr/fmn-config.c 	struct xlr_fmn_info *dma = &xlr_board_fmn_config.dma;
dma               199 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma               209 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma               226 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma               239 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma               251 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma               263 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma               277 arch/mips/netlogic/xlr/fmn-config.c 		setup_fmn_cc(dma, FMN_STNID_DMA_0,
dma                70 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	int dma = !(req->flags & MPC52XX_LPBFIFO_FLAG_NO_DMA);
dma                79 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	if (!dma) {
dma               174 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	if (dma)
dma               229 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	int dma, write, poll_dma;
dma               241 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	dma = !(req->flags & MPC52XX_LPBFIFO_FLAG_NO_DMA);
dma               245 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	if (dma && !write) {
dma               266 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	if (!dma && !write) {
dma               287 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	if (dma && (status & 0x11)) {
dma               379 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	int dma = !(req->flags & MPC52XX_LPBFIFO_FLAG_NO_DMA);
dma               385 arch/powerpc/platforms/52xx/mpc52xx_lpbfifo.c 	if (dma && write)
dma               214 arch/powerpc/platforms/chrp/pci.c 	const unsigned int *dma;
dma               291 arch/powerpc/platforms/chrp/pci.c 				dma = of_get_property(dev, "system-dma-base",
dma               293 arch/powerpc/platforms/chrp/pci.c 				if (dma && len >= sizeof(*dma)) {
dma               294 arch/powerpc/platforms/chrp/pci.c 					dma = (unsigned int *)
dma               295 arch/powerpc/platforms/chrp/pci.c 						(((unsigned long)dma) +
dma               296 arch/powerpc/platforms/chrp/pci.c 						len - sizeof(*dma));
dma               297 arch/powerpc/platforms/chrp/pci.c 						pci_dram_offset = *dma;
dma               310 arch/powerpc/platforms/chrp/pci.c 		dma = of_get_property(dev, "ibm,dma-ranges", &len);
dma               311 arch/powerpc/platforms/chrp/pci.c 		if (index == 0 && dma != NULL && len >= 6 * sizeof(*dma)) {
dma               312 arch/powerpc/platforms/chrp/pci.c 			pci_dram_offset = dma[2] - dma[3];
dma                32 arch/s390/include/asm/linkage.h 	.section .dma.ex_table, "a" ;	\
dma               488 arch/s390/pci/pci_dma.c 	struct scatterlist *s = sg, *start = sg, *dma = sg;
dma               503 arch/s390/pci/pci_dma.c 					      &dma->dma_address, dir))
dma               506 arch/s390/pci/pci_dma.c 			dma->dma_address += offset;
dma               507 arch/s390/pci/pci_dma.c 			dma->dma_length = size - offset;
dma               511 arch/s390/pci/pci_dma.c 			dma = sg_next(dma);
dma               516 arch/s390/pci/pci_dma.c 	if (__s390_dma_map_sg(dev, start, size, &dma->dma_address, dir))
dma               519 arch/s390/pci/pci_dma.c 	dma->dma_address += offset;
dma               520 arch/s390/pci/pci_dma.c 	dma->dma_length = size - offset;
dma               201 arch/sparc/include/asm/parport.h 	int slot = p->dma;
dma                66 arch/unicore32/kernel/puv3-core.c 		.dma = 1,
dma               338 arch/x86/kernel/pci-calgary_64.c 		dma_addr_t dma = s->dma_address;
dma               344 arch/x86/kernel/pci-calgary_64.c 		npages = iommu_num_pages(dma, dmalen, PAGE_SIZE);
dma               345 arch/x86/kernel/pci-calgary_64.c 		iommu_free(tbl, dma, npages);
dma               139 block/blk-settings.c 	int dma = 0;
dma               149 block/blk-settings.c 		dma = 1;
dma               153 block/blk-settings.c 		dma = 1;
dma               156 block/blk-settings.c 	if (dma) {
dma                42 crypto/async_tx/async_pq.c 	struct dma_device *dma = chan->device;
dma                53 crypto/async_tx/async_pq.c 		pq_src_cnt = min(src_cnt, dma_maxpq(dma, dma_flags));
dma                78 crypto/async_tx/async_pq.c 			tx = dma->device_prep_dma_pq(chan, dma_dest,
dma                23 crypto/async_tx/async_raid6_recov.c 	struct dma_device *dma = chan ? chan->device : NULL;
dma                29 crypto/async_tx/async_raid6_recov.c 	if (dma)
dma                30 crypto/async_tx/async_raid6_recov.c 		unmap = dmaengine_get_unmap_data(dma->dev, 3, GFP_NOWAIT);
dma                33 crypto/async_tx/async_raid6_recov.c 		struct device *dev = dma->dev;
dma                50 crypto/async_tx/async_raid6_recov.c 		tx = dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef,
dma                88 crypto/async_tx/async_raid6_recov.c 	struct dma_device *dma = chan ? chan->device : NULL;
dma                93 crypto/async_tx/async_raid6_recov.c 	if (dma)
dma                94 crypto/async_tx/async_raid6_recov.c 		unmap = dmaengine_get_unmap_data(dma->dev, 3, GFP_NOWAIT);
dma                98 crypto/async_tx/async_raid6_recov.c 		struct device *dev = dma->dev;
dma               115 crypto/async_tx/async_raid6_recov.c 		tx = dma->device_prep_dma_pq(chan, dma_dest, unmap->addr,
dma                26 crypto/async_tx/async_xor.c 	struct dma_device *dma = chan->device;
dma                41 crypto/async_tx/async_xor.c 		xor_src_cnt = min(src_cnt, (int)dma->max_xor);
dma                65 crypto/async_tx/async_xor.c 		tx = dma->device_prep_dma_xor(chan, dma_dest, src_list,
dma                75 crypto/async_tx/async_xor.c 			tx = dma->device_prep_dma_xor(chan, dma_dest,
dma               488 drivers/acpi/acpica/amlresrc.h 	struct aml_resource_dma dma;
dma                45 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_2BITFLAG, ACPI_RSD_OFFSET(dma.type), "Speed",
dma                47 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_1BITFLAG, ACPI_RSD_OFFSET(dma.bus_master), "Mastering",
dma                49 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_2BITFLAG, ACPI_RSD_OFFSET(dma.transfer), "Transfer Type",
dma                51 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_UINT8, ACPI_RSD_OFFSET(dma.channel_count), "Channel Count",
dma                53 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_SHORTLIST, ACPI_RSD_OFFSET(dma.channels[0]), "Channel List",
dma               225 drivers/acpi/acpica/rsirq.c 	{ACPI_RSC_2BITFLAG, ACPI_RS_OFFSET(data.dma.transfer),
dma               226 drivers/acpi/acpica/rsirq.c 	 AML_OFFSET(dma.flags),
dma               229 drivers/acpi/acpica/rsirq.c 	{ACPI_RSC_1BITFLAG, ACPI_RS_OFFSET(data.dma.bus_master),
dma               230 drivers/acpi/acpica/rsirq.c 	 AML_OFFSET(dma.flags),
dma               233 drivers/acpi/acpica/rsirq.c 	{ACPI_RSC_2BITFLAG, ACPI_RS_OFFSET(data.dma.type),
dma               234 drivers/acpi/acpica/rsirq.c 	 AML_OFFSET(dma.flags),
dma               239 drivers/acpi/acpica/rsirq.c 	{ACPI_RSC_BITMASK, ACPI_RS_OFFSET(data.dma.channels[0]),
dma               240 drivers/acpi/acpica/rsirq.c 	 AML_OFFSET(dma.dma_channel_mask),
dma               241 drivers/acpi/acpica/rsirq.c 	 ACPI_RS_OFFSET(data.dma.channel_count)}
dma               781 drivers/acpi/acpica/rsmisc.c if (resource->data.dma.transfer == 0x03) {
dma               517 drivers/ata/libata-acpi.c 	mode = ata_timing_cycle2mode(type, gtm->drive[unit].dma);
dma               139 drivers/ata/libata-core.c module_param_named(dma, libata_dma_mask, int, 0444);
dma               140 drivers/ata/libata-core.c MODULE_PARM_DESC(dma, "DMA enable/disable (0x1==ATA, 0x2==ATAPI, 0x4==CF)");
dma              1499 drivers/ata/libata-core.c 		int dma = (id[ATA_ID_CFA_MODES] >> 3) & 0x7;
dma              1505 drivers/ata/libata-core.c 		if (dma)
dma              1507 drivers/ata/libata-core.c 		if (dma > 1)
dma              2048 drivers/ata/libata-core.c 	bool dma = false;
dma              2065 drivers/ata/libata-core.c 		dma = true;
dma              2069 drivers/ata/libata-core.c 		dma = false;
dma              2080 drivers/ata/libata-core.c 	if (err_mask && dma) {
dma              4125 drivers/ata/libata-scsi.c static u8 ata_scsi_trusted_op(u32 len, bool send, bool dma)
dma              4130 drivers/ata/libata-scsi.c 		return dma ? ATA_CMD_TRUSTED_SND_DMA : ATA_CMD_TRUSTED_SND;
dma              4132 drivers/ata/libata-scsi.c 		return dma ? ATA_CMD_TRUSTED_RCV_DMA : ATA_CMD_TRUSTED_RCV;
dma              4144 drivers/ata/libata-scsi.c 	bool dma = !(qc->dev->flags & ATA_DFLAG_PIO);
dma              4169 drivers/ata/libata-scsi.c 	tf->protocol = dma ? ATA_PROT_DMA : ATA_PROT_PIO;
dma              4173 drivers/ata/libata-scsi.c 	tf->command = ata_scsi_trusted_op(len, send, dma);
dma               147 drivers/ata/pata_acpi.c 		acpi->gtm.drive[unit].dma = t->udma;
dma               150 drivers/ata/pata_acpi.c 		acpi->gtm.drive[unit].dma = t->cycle;
dma               297 drivers/ata/pata_amd.c 			 gtm->drive[0].dma, gtm->drive[1].dma, gtm->flags);
dma               154 drivers/ata/pata_atiixp.c 	int dma = adev->dma_mode;
dma               164 drivers/ata/pata_atiixp.c 		dma -= XFER_UDMA_0;
dma               168 drivers/ata/pata_atiixp.c 		udma_mode_data |= dma << (4 * dn);
dma               174 drivers/ata/pata_atiixp.c 		dma -= XFER_MW_DMA_0;
dma               179 drivers/ata/pata_atiixp.c 		mwdma_timing_data |= (mwdma_timings[dma] << timing_shift);
dma                54 drivers/ata/pata_icside.c 	unsigned int dma;
dma               230 drivers/ata/pata_icside.c 	BUG_ON(dma_channel_active(state->dma));
dma               237 drivers/ata/pata_icside.c 	set_dma_speed(state->dma, state->port[ap->port_no].speed[qc->dev->devno]);
dma               238 drivers/ata/pata_icside.c 	set_dma_sg(state->dma, qc->sg, qc->n_elem);
dma               239 drivers/ata/pata_icside.c 	set_dma_mode(state->dma, write ? DMA_MODE_WRITE : DMA_MODE_READ);
dma               250 drivers/ata/pata_icside.c 	BUG_ON(dma_channel_active(state->dma));
dma               251 drivers/ata/pata_icside.c 	enable_dma(state->dma);
dma               259 drivers/ata/pata_icside.c 	disable_dma(state->dma);
dma               287 drivers/ata/pata_icside.c 	if (ec->dma != NO_DMA && !request_dma(ec->dma, DRV_NAME)) {
dma               288 drivers/ata/pata_icside.c 		state->dma = ec->dma;
dma               496 drivers/ata/pata_icside.c 	state->dma = NO_DMA;
dma               591 drivers/ata/pata_icside.c 	if (state->dma != NO_DMA)
dma               592 drivers/ata/pata_icside.c 		free_dma(state->dma);
dma               287 drivers/ata/pata_it821x.c 	static const u16 dma[]	= 	{ 0x8866, 0x3222, 0x3121 };
dma               322 drivers/ata/pata_it821x.c 		itdev->mwdma[unit] = dma[mode_wanted];
dma               985 drivers/ata/pata_macio.c 				 void __iomem * base, void __iomem * dma)
dma              1003 drivers/ata/pata_macio.c 	ioaddr->bmdma_addr	= dma;
dma               414 drivers/ata/pata_mpc52xx.c 		int dma = adev->dma_mode - XFER_UDMA_0;
dma               415 drivers/ata/pata_mpc52xx.c 		rv = mpc52xx_ata_compute_udma_timings(priv, adev->devno, dma);
dma               417 drivers/ata/pata_mpc52xx.c 		int dma = adev->dma_mode - XFER_MW_DMA_0;
dma               418 drivers/ata/pata_mpc52xx.c 		rv = mpc52xx_ata_compute_mdma_timings(priv, adev->devno, dma);
dma               120 drivers/ata/pata_optidma.c 	int dma = adev->dma_mode - XFER_MW_DMA_0;
dma               172 drivers/ata/pata_optidma.c 		iowrite8(dma_data_rec_timing[pci_clock][dma], regio + READ_REG);
dma               173 drivers/ata/pata_optidma.c 		iowrite8(dma_data_rec_timing[pci_clock][dma], regio + WRITE_REG);
dma               123 drivers/ata/pata_sl82c105.c 	int dma = adev->dma_mode - XFER_MW_DMA_0;
dma               125 drivers/ata/pata_sl82c105.c 	pci_write_config_word(pdev, timing, dma_timing[dma]);
dma               144 drivers/ata/sata_dwc_460ex.c 	struct dw_dma_chip	*dma;
dma               242 drivers/ata/sata_dwc_460ex.c 	hsdev->dma = devm_kzalloc(&pdev->dev, sizeof(*hsdev->dma), GFP_KERNEL);
dma               243 drivers/ata/sata_dwc_460ex.c 	if (!hsdev->dma)
dma               246 drivers/ata/sata_dwc_460ex.c 	hsdev->dma->dev = &pdev->dev;
dma               247 drivers/ata/sata_dwc_460ex.c 	hsdev->dma->id = pdev->id;
dma               250 drivers/ata/sata_dwc_460ex.c 	hsdev->dma->irq = irq_of_parse_and_map(np, 1);
dma               251 drivers/ata/sata_dwc_460ex.c 	if (hsdev->dma->irq == NO_IRQ) {
dma               258 drivers/ata/sata_dwc_460ex.c 	hsdev->dma->regs = devm_ioremap_resource(&pdev->dev, res);
dma               259 drivers/ata/sata_dwc_460ex.c 	if (IS_ERR(hsdev->dma->regs))
dma               260 drivers/ata/sata_dwc_460ex.c 		return PTR_ERR(hsdev->dma->regs);
dma               263 drivers/ata/sata_dwc_460ex.c 	return dw_dma_probe(hsdev->dma);
dma               268 drivers/ata/sata_dwc_460ex.c 	if (!hsdev->dma)
dma               271 drivers/ata/sata_dwc_460ex.c 	dw_dma_remove(hsdev->dma);
dma               350 drivers/atm/eni.c 	u32 dma[RX_DMA_BUF*2];
dma               375 drivers/atm/eni.c 		dma[j++] = (here << MID_DMA_COUNT_SHIFT) | (vcc->vci
dma               377 drivers/atm/eni.c 		dma[j++] = 0;
dma               395 drivers/atm/eni.c 			dma[j++] = MID_DT_WORD | (init << MID_DMA_COUNT_SHIFT) |
dma               397 drivers/atm/eni.c 			dma[j++] = paddr;
dma               403 drivers/atm/eni.c 			dma[j++] = MID_DT_16W | ((words >> 4) <<
dma               406 drivers/atm/eni.c 			dma[j++] = paddr;
dma               413 drivers/atm/eni.c 			dma[j++] = MID_DT_8W | ((words >> 3) <<
dma               416 drivers/atm/eni.c 			dma[j++] = paddr;
dma               423 drivers/atm/eni.c 			dma[j++] = MID_DT_4W | ((words >> 2) <<
dma               426 drivers/atm/eni.c 			dma[j++] = paddr;
dma               433 drivers/atm/eni.c 			dma[j++] = MID_DT_2W | ((words >> 1) <<
dma               436 drivers/atm/eni.c 			dma[j++] = paddr;
dma               442 drivers/atm/eni.c 			dma[j++] = MID_DT_WORD | (words << MID_DMA_COUNT_SHIFT)
dma               444 drivers/atm/eni.c 			dma[j++] = paddr;
dma               448 drivers/atm/eni.c 		dma[j++] = (here << MID_DMA_COUNT_SHIFT) |
dma               450 drivers/atm/eni.c 		dma[j++] = 0;
dma               456 drivers/atm/eni.c 	dma[j-2] |= MID_DMA_END;
dma               470 drivers/atm/eni.c 		writel(dma[i*2],eni_dev->rx_dma+dma_wr*8);
dma               471 drivers/atm/eni.c 		writel(dma[i*2+1],eni_dev->rx_dma+dma_wr*8+4);
dma               934 drivers/atm/eni.c static inline void put_dma(int chan,u32 *dma,int *j,dma_addr_t paddr,
dma               952 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_BYTE | (init << MID_DMA_COUNT_SHIFT) |
dma               954 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma               965 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_WORD | (init << MID_DMA_COUNT_SHIFT) |
dma               967 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma               975 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_16W | ((words >> 4) << MID_DMA_COUNT_SHIFT)
dma               977 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma               986 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_8W | ((words >> 3) << MID_DMA_COUNT_SHIFT)
dma               988 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma               997 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_4W | ((words >> 2) << MID_DMA_COUNT_SHIFT)
dma               999 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma              1008 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_2W | ((words >> 1) << MID_DMA_COUNT_SHIFT)
dma              1010 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma              1018 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_WORD | (words << MID_DMA_COUNT_SHIFT) |
dma              1020 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma              1026 drivers/atm/eni.c 		dma[(*j)++] = MID_DT_BYTE | (size << MID_DMA_COUNT_SHIFT) |
dma              1028 drivers/atm/eni.c 		dma[(*j)++] = paddr;
dma              1122 drivers/atm/eni.c 	eni_dev->dma[j++] = (((tx->tx_pos+TX_DESCR_SIZE) & (tx->words-1)) <<
dma              1127 drivers/atm/eni.c 		if (aal5) put_dma(tx->index,eni_dev->dma,&j,paddr,skb->len);
dma              1128 drivers/atm/eni.c 		else put_dma(tx->index,eni_dev->dma,&j,paddr+4,skb->len-4);
dma              1133 drivers/atm/eni.c 				put_dma(tx->index,eni_dev->dma,&j,(unsigned long)
dma              1137 drivers/atm/eni.c 				put_dma(tx->index,eni_dev->dma,&j,(unsigned long)
dma              1143 drivers/atm/eni.c 		put_dma(tx->index, eni_dev->dma, &j, eni_dev->zero.dma,
dma              1147 drivers/atm/eni.c 	eni_dev->dma[j++] = (((tx->tx_pos+size) & (tx->words-1)) <<
dma              1168 drivers/atm/eni.c 		writel(eni_dev->dma[i*2],eni_dev->tx_dma+dma_wr*8);
dma              1169 drivers/atm/eni.c 		writel(eni_dev->dma[i*2+1],eni_dev->tx_dma+dma_wr*8+4);
dma              2257 drivers/atm/eni.c 					ENI_ZEROES_SIZE, &zero->dma, GFP_KERNEL);
dma              2288 drivers/atm/eni.c 	dma_free_coherent(&pci_dev->dev, ENI_ZEROES_SIZE, zero->addr, zero->dma);
dma              2313 drivers/atm/eni.c 	dma_free_coherent(&pdev->dev, ENI_ZEROES_SIZE, zero->addr, zero->dma);
dma                90 drivers/atm/eni.h 	u32 dma[TX_DMA_BUF*2];		/* DMA request scratch area */
dma                93 drivers/atm/eni.h 		dma_addr_t dma;
dma              1202 drivers/atm/iphase.c 	writel(1, iadev->dma+IPHASE5575_RX_COUNTER);   
dma              1289 drivers/atm/iphase.c   dle_lp = readl(iadev->dma+IPHASE5575_RX_LIST_ADDR) & (sizeof(struct dle)*DLE_ENTRIES - 1);  
dma              1451 drivers/atm/iphase.c 	       iadev->dma + IPHASE5575_RX_LIST_ADDR);  
dma              1453 drivers/atm/iphase.c                       iadev->dma+IPHASE5575_TX_LIST_ADDR,
dma              1454 drivers/atm/iphase.c                       readl(iadev->dma + IPHASE5575_TX_LIST_ADDR));
dma              1456 drivers/atm/iphase.c                       iadev->dma+IPHASE5575_RX_LIST_ADDR,
dma              1457 drivers/atm/iphase.c                       readl(iadev->dma + IPHASE5575_RX_LIST_ADDR));)
dma              1696 drivers/atm/iphase.c         dle_lp = readl(iadev->dma+IPHASE5575_TX_LIST_ADDR) & 
dma              1935 drivers/atm/iphase.c 	       iadev->dma + IPHASE5575_TX_LIST_ADDR);  
dma              2392 drivers/atm/iphase.c 	iadev->dma = base + PHY_BASE;  
dma              3056 drivers/atm/iphase.c 	writel(2, iadev->dma+IPHASE5575_TX_COUNTER);  
dma               991 drivers/atm/iphase.h 	u32 __iomem *dma;	/* Base pointer into DMA control registers. */
dma               116 drivers/atm/nicstar.c 		(scq->dma + ((unsigned long)(p) - (unsigned long)(scq)->org))
dma               249 drivers/atm/nicstar.c 			  card->rsq.org, card->rsq.dma);
dma               251 drivers/atm/nicstar.c 			  card->tsq.org, card->tsq.dma);
dma               531 drivers/atm/nicstar.c 					   &card->tsq.dma, GFP_KERNEL);
dma               544 drivers/atm/nicstar.c 	writel(ALIGN(card->tsq.dma, NS_TSQ_ALIGNMENT), card->membase + TSQB);
dma               550 drivers/atm/nicstar.c 					   &card->rsq.dma, GFP_KERNEL);
dma               563 drivers/atm/nicstar.c 	writel(ALIGN(card->rsq.dma, NS_RSQ_ALIGNMENT), card->membase + RSQB);
dma               869 drivers/atm/nicstar.c 				      2 * size,  &scq->dma, GFP_KERNEL);
dma               879 drivers/atm/nicstar.c 				  2 * size, scq->org, scq->dma);
dma               936 drivers/atm/nicstar.c 			  scq->org, scq->dma);
dma               643 drivers/atm/nicstar.h 	u32 dma;
dma               650 drivers/atm/nicstar.h         (((struct ns_skb_prv *)(ATM_SKB(skb)+1))->dma)
dma               656 drivers/atm/nicstar.h         dma_addr_t dma;
dma               664 drivers/atm/nicstar.h 	dma_addr_t dma;
dma               682 drivers/atm/nicstar.h         dma_addr_t dma;
dma                74 drivers/block/rsxx/dma.c 	struct rsxx_dma	*dma;
dma               114 drivers/block/rsxx/dma.c static unsigned int get_dma_size(struct rsxx_dma *dma)
dma               116 drivers/block/rsxx/dma.c 	if (dma->sub_page.cnt)
dma               117 drivers/block/rsxx/dma.c 		return dma->sub_page.cnt << 9;
dma               126 drivers/block/rsxx/dma.c 			    struct rsxx_dma *dma)
dma               128 drivers/block/rsxx/dma.c 	trackers->list[tag].dma = dma;
dma               134 drivers/block/rsxx/dma.c 	return trackers->list[tag].dma;
dma               157 drivers/block/rsxx/dma.c 	trackers->list[tag].dma = NULL;
dma               210 drivers/block/rsxx/dma.c static void rsxx_free_dma(struct rsxx_dma_ctrl *ctrl, struct rsxx_dma *dma)
dma               212 drivers/block/rsxx/dma.c 	if (dma->cmd != HW_CMD_BLK_DISCARD) {
dma               213 drivers/block/rsxx/dma.c 		if (!dma_mapping_error(&ctrl->card->dev->dev, dma->dma_addr)) {
dma               214 drivers/block/rsxx/dma.c 			dma_unmap_page(&ctrl->card->dev->dev, dma->dma_addr,
dma               215 drivers/block/rsxx/dma.c 				       get_dma_size(dma),
dma               216 drivers/block/rsxx/dma.c 				       dma->cmd == HW_CMD_BLK_WRITE ?
dma               222 drivers/block/rsxx/dma.c 	kmem_cache_free(rsxx_dma_pool, dma);
dma               226 drivers/block/rsxx/dma.c 				  struct rsxx_dma *dma,
dma               236 drivers/block/rsxx/dma.c 	if (dma->cb)
dma               237 drivers/block/rsxx/dma.c 		dma->cb(ctrl->card, dma->cb_data, status ? 1 : 0);
dma               239 drivers/block/rsxx/dma.c 	rsxx_free_dma(ctrl, dma);
dma               245 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma               249 drivers/block/rsxx/dma.c 	list_for_each_entry_safe(dma, tmp, q, list) {
dma               250 drivers/block/rsxx/dma.c 		list_del(&dma->list);
dma               252 drivers/block/rsxx/dma.c 			rsxx_complete_dma(ctrl, dma, DMA_CANCELLED);
dma               254 drivers/block/rsxx/dma.c 			rsxx_free_dma(ctrl, dma);
dma               262 drivers/block/rsxx/dma.c 				 struct rsxx_dma *dma)
dma               270 drivers/block/rsxx/dma.c 	list_add(&dma->list, &ctrl->queue);
dma               275 drivers/block/rsxx/dma.c 				      struct rsxx_dma *dma,
dma               283 drivers/block/rsxx/dma.c 		dma->cmd, dma->laddr, hw_st);
dma               292 drivers/block/rsxx/dma.c 	switch (dma->cmd) {
dma               296 drivers/block/rsxx/dma.c 				dma->cmd = HW_CMD_BLK_RECON_READ;
dma               331 drivers/block/rsxx/dma.c 			   dma->cmd, dma->laddr, hw_st);
dma               338 drivers/block/rsxx/dma.c 		rsxx_requeue_dma(ctrl, dma);
dma               340 drivers/block/rsxx/dma.c 		rsxx_complete_dma(ctrl, dma, status);
dma               384 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma               409 drivers/block/rsxx/dma.c 		dma = list_entry(ctrl->queue.next, struct rsxx_dma, list);
dma               410 drivers/block/rsxx/dma.c 		list_del(&dma->list);
dma               421 drivers/block/rsxx/dma.c 			rsxx_complete_dma(ctrl, dma, DMA_CANCELLED);
dma               425 drivers/block/rsxx/dma.c 		if (dma->cmd != HW_CMD_BLK_DISCARD) {
dma               426 drivers/block/rsxx/dma.c 			if (dma->cmd == HW_CMD_BLK_WRITE)
dma               441 drivers/block/rsxx/dma.c 			dma->dma_addr = dma_map_page(&ctrl->card->dev->dev, dma->page,
dma               442 drivers/block/rsxx/dma.c 					dma->pg_off, dma->sub_page.cnt << 9, dir);
dma               443 drivers/block/rsxx/dma.c 			if (dma_mapping_error(&ctrl->card->dev->dev, dma->dma_addr)) {
dma               445 drivers/block/rsxx/dma.c 				rsxx_complete_dma(ctrl, dma, DMA_CANCELLED);
dma               450 drivers/block/rsxx/dma.c 		set_tracker_dma(ctrl->trackers, tag, dma);
dma               451 drivers/block/rsxx/dma.c 		hw_cmd_buf[ctrl->cmd.idx].command  = dma->cmd;
dma               455 drivers/block/rsxx/dma.c 					((dma->sub_page.cnt & 0x7) << 4) |
dma               456 drivers/block/rsxx/dma.c 					 (dma->sub_page.off & 0x7);
dma               459 drivers/block/rsxx/dma.c 					cpu_to_le32(dma->laddr);
dma               462 drivers/block/rsxx/dma.c 					cpu_to_le64(dma->dma_addr);
dma               466 drivers/block/rsxx/dma.c 			ctrl->id, dma->laddr, tag, ctrl->cmd.idx);
dma               471 drivers/block/rsxx/dma.c 		if (dma->cmd == HW_CMD_BLK_WRITE)
dma               473 drivers/block/rsxx/dma.c 		else if (dma->cmd == HW_CMD_BLK_DISCARD)
dma               496 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma               525 drivers/block/rsxx/dma.c 		dma = get_tracker_dma(ctrl->trackers, tag);
dma               526 drivers/block/rsxx/dma.c 		if (dma == NULL) {
dma               541 drivers/block/rsxx/dma.c 			ctrl->id, dma->laddr, tag, status, count,
dma               550 drivers/block/rsxx/dma.c 			rsxx_handle_dma_error(ctrl, dma, status);
dma               552 drivers/block/rsxx/dma.c 			rsxx_complete_dma(ctrl, dma, 0);
dma               606 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma               608 drivers/block/rsxx/dma.c 	dma = kmem_cache_alloc(rsxx_dma_pool, GFP_KERNEL);
dma               609 drivers/block/rsxx/dma.c 	if (!dma)
dma               612 drivers/block/rsxx/dma.c 	dma->cmd          = HW_CMD_BLK_DISCARD;
dma               613 drivers/block/rsxx/dma.c 	dma->laddr        = laddr;
dma               614 drivers/block/rsxx/dma.c 	dma->dma_addr     = 0;
dma               615 drivers/block/rsxx/dma.c 	dma->sub_page.off = 0;
dma               616 drivers/block/rsxx/dma.c 	dma->sub_page.cnt = 0;
dma               617 drivers/block/rsxx/dma.c 	dma->page         = NULL;
dma               618 drivers/block/rsxx/dma.c 	dma->pg_off       = 0;
dma               619 drivers/block/rsxx/dma.c 	dma->cb	          = cb;
dma               620 drivers/block/rsxx/dma.c 	dma->cb_data      = cb_data;
dma               622 drivers/block/rsxx/dma.c 	dev_dbg(CARD_TO_DEV(card), "Queuing[D] laddr %x\n", dma->laddr);
dma               624 drivers/block/rsxx/dma.c 	list_add_tail(&dma->list, q);
dma               640 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma               642 drivers/block/rsxx/dma.c 	dma = kmem_cache_alloc(rsxx_dma_pool, GFP_KERNEL);
dma               643 drivers/block/rsxx/dma.c 	if (!dma)
dma               646 drivers/block/rsxx/dma.c 	dma->cmd          = dir ? HW_CMD_BLK_WRITE : HW_CMD_BLK_READ;
dma               647 drivers/block/rsxx/dma.c 	dma->laddr        = laddr;
dma               648 drivers/block/rsxx/dma.c 	dma->sub_page.off = (dma_off >> 9);
dma               649 drivers/block/rsxx/dma.c 	dma->sub_page.cnt = (dma_len >> 9);
dma               650 drivers/block/rsxx/dma.c 	dma->page         = page;
dma               651 drivers/block/rsxx/dma.c 	dma->pg_off       = pg_off;
dma               652 drivers/block/rsxx/dma.c 	dma->cb	          = cb;
dma               653 drivers/block/rsxx/dma.c 	dma->cb_data      = cb_data;
dma               657 drivers/block/rsxx/dma.c 		dir ? 'W' : 'R', dma->laddr, dma->sub_page.off,
dma               658 drivers/block/rsxx/dma.c 		dma->sub_page.cnt, dma->page, dma->pg_off);
dma               661 drivers/block/rsxx/dma.c 	list_add_tail(&dma->list, q);
dma               818 drivers/block/rsxx/dma.c 		ctrl->trackers->list[i].dma = NULL;
dma               964 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma               970 drivers/block/rsxx/dma.c 		dma = get_tracker_dma(ctrl->trackers, i);
dma               971 drivers/block/rsxx/dma.c 		if (dma) {
dma               973 drivers/block/rsxx/dma.c 			rsxx_complete_dma(ctrl, dma, DMA_CANCELLED);
dma              1024 drivers/block/rsxx/dma.c 	struct rsxx_dma *dma;
dma              1036 drivers/block/rsxx/dma.c 			dma = get_tracker_dma(card->ctrl[i].trackers, j);
dma              1037 drivers/block/rsxx/dma.c 			if (dma == NULL)
dma              1040 drivers/block/rsxx/dma.c 			if (dma->cmd == HW_CMD_BLK_WRITE)
dma              1042 drivers/block/rsxx/dma.c 			else if (dma->cmd == HW_CMD_BLK_DISCARD)
dma              1047 drivers/block/rsxx/dma.c 			if (dma->cmd != HW_CMD_BLK_DISCARD) {
dma              1048 drivers/block/rsxx/dma.c 				dma_unmap_page(&card->dev->dev, dma->dma_addr,
dma              1049 drivers/block/rsxx/dma.c 					       get_dma_size(dma),
dma              1050 drivers/block/rsxx/dma.c 					       dma->cmd == HW_CMD_BLK_WRITE ?
dma              1055 drivers/block/rsxx/dma.c 			list_add_tail(&dma->list, &issued_dmas[i]);
dma               178 drivers/block/swim3.c 	struct dbdma_regs __iomem *dma;	/* DMA controller registers */
dma               421 drivers/block/swim3.c 	struct dbdma_regs __iomem *dr = fs->dma;
dma               615 drivers/block/swim3.c 	struct dbdma_regs __iomem *dr = fs->dma;
dma               714 drivers/block/swim3.c 		dr = fs->dma;
dma              1124 drivers/block/swim3.c 	fs->dma = (struct dbdma_regs __iomem *)
dma              1126 drivers/block/swim3.c 	if (fs->dma == NULL) {
dma              1162 drivers/block/swim3.c 	iounmap(fs->dma);
dma               103 drivers/char/virtio_console.c 	dma_addr_t dma;
dma               381 drivers/char/virtio_console.c 		dma_free_coherent(buf->dev, buf->size, buf->buf, buf->dma);
dma               447 drivers/char/virtio_console.c 		buf->buf = dma_alloc_coherent(buf->dev, buf_size, &buf->dma,
dma               511 drivers/clk/zynq/clkc.c 	clks[dma] = clk_register_gate(NULL, clk_output_name[dma],
dma               640 drivers/crypto/atmel-aes.c 				    struct atmel_aes_dma *dma)
dma               655 drivers/crypto/atmel-aes.c 			dma->nents = nents+1;
dma               656 drivers/crypto/atmel-aes.c 			dma->remainder = sg->length - len;
dma               670 drivers/crypto/atmel-aes.c static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
dma               672 drivers/crypto/atmel-aes.c 	struct scatterlist *sg = dma->sg;
dma               673 drivers/crypto/atmel-aes.c 	int nents = dma->nents;
dma               675 drivers/crypto/atmel-aes.c 	if (!dma->remainder)
dma               684 drivers/crypto/atmel-aes.c 	sg->length += dma->remainder;
dma               787 drivers/crypto/atmel-aes.c 	struct atmel_aes_dma *dma;
dma               799 drivers/crypto/atmel-aes.c 		dma = &dd->src;
dma               805 drivers/crypto/atmel-aes.c 		dma = &dd->dst;
dma               814 drivers/crypto/atmel-aes.c 	err = dmaengine_slave_config(dma->chan, &config);
dma               818 drivers/crypto/atmel-aes.c 	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
dma               826 drivers/crypto/atmel-aes.c 	dma_async_issue_pending(dma->chan);
dma               834 drivers/crypto/atmel-aes.c 	struct atmel_aes_dma *dma;
dma               838 drivers/crypto/atmel-aes.c 		dma = &dd->src;
dma               842 drivers/crypto/atmel-aes.c 		dma = &dd->dst;
dma               849 drivers/crypto/atmel-aes.c 	dmaengine_terminate_all(dma->chan);
dma               473 drivers/crypto/atmel-sha.c static void atmel_sha_write_ctrl(struct atmel_sha_dev *dd, int dma)
dma               479 drivers/crypto/atmel-sha.c 	if (likely(dma)) {
dma              1458 drivers/crypto/atmel-sha.c 	struct atmel_sha_dma *dma = &dd->dma_lch_in;
dma              1473 drivers/crypto/atmel-sha.c 			dma->nents = nents + 1;
dma              1474 drivers/crypto/atmel-sha.c 			dma->last_sg_length = sg->length;
dma              1492 drivers/crypto/atmel-sha.c 	struct atmel_sha_dma *dma = &dd->dma_lch_in;
dma              1496 drivers/crypto/atmel-sha.c 	dmaengine_terminate_all(dma->chan);
dma              1497 drivers/crypto/atmel-sha.c 	dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE);
dma              1499 drivers/crypto/atmel-sha.c 	sg = dma->sg;
dma              1500 drivers/crypto/atmel-sha.c 	for (nents = 0; nents < dma->nents - 1; ++nents)
dma              1502 drivers/crypto/atmel-sha.c 	sg->length = dma->last_sg_length;
dma              1513 drivers/crypto/atmel-sha.c 	struct atmel_sha_dma *dma = &dd->dma_lch_in;
dma              1514 drivers/crypto/atmel-sha.c 	struct dma_slave_config *config = &dma->dma_conf;
dma              1515 drivers/crypto/atmel-sha.c 	struct dma_chan *chan = dma->chan;
dma              1527 drivers/crypto/atmel-sha.c 	dma->sg = src;
dma              1528 drivers/crypto/atmel-sha.c 	sg_len = dma_map_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE);
dma              1540 drivers/crypto/atmel-sha.c 	desc = dmaengine_prep_slave_sg(chan, dma->sg, sg_len, DMA_MEM_TO_DEV,
dma              1559 drivers/crypto/atmel-sha.c 	dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE);
dma               283 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma;
dma               430 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               434 drivers/crypto/axis/artpec6_crypto.c 	list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) {
dma               483 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               489 drivers/crypto/axis/artpec6_crypto.c 	ind = FIELD_PREP(PDMA_IN_DESCRQ_PUSH_LEN, dma->in_cnt - 1) |
dma               490 drivers/crypto/axis/artpec6_crypto.c 	      FIELD_PREP(PDMA_IN_DESCRQ_PUSH_ADDR, dma->in_dma_addr >> 6);
dma               492 drivers/crypto/axis/artpec6_crypto.c 	statd = FIELD_PREP(PDMA_IN_STATQ_PUSH_LEN, dma->in_cnt - 1) |
dma               493 drivers/crypto/axis/artpec6_crypto.c 		FIELD_PREP(PDMA_IN_STATQ_PUSH_ADDR, dma->stat_dma_addr >> 6);
dma               495 drivers/crypto/axis/artpec6_crypto.c 	outd = FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_LEN, dma->out_cnt - 1) |
dma               496 drivers/crypto/axis/artpec6_crypto.c 	       FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_ADDR, dma->out_dma_addr >> 6);
dma               517 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               519 drivers/crypto/axis/artpec6_crypto.c 	dma->out_cnt = 0;
dma               520 drivers/crypto/axis/artpec6_crypto.c 	dma->in_cnt = 0;
dma               521 drivers/crypto/axis/artpec6_crypto.c 	dma->map_count = 0;
dma               522 drivers/crypto/axis/artpec6_crypto.c 	INIT_LIST_HEAD(&dma->bounce_buffers);
dma               547 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               550 drivers/crypto/axis/artpec6_crypto.c 	if (dma->out_cnt >= PDMA_DESCR_COUNT ||
dma               556 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->out[dma->out_cnt++];
dma               580 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               583 drivers/crypto/axis/artpec6_crypto.c 	if (dma->out_cnt >= PDMA_DESCR_COUNT ||
dma               590 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->out[dma->out_cnt++];
dma               606 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               613 drivers/crypto/axis/artpec6_crypto.c 	if (dma->map_count >= ARRAY_SIZE(dma->maps))
dma               620 drivers/crypto/axis/artpec6_crypto.c 	map = &dma->maps[dma->map_count++];
dma               646 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               649 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_dma_map_single(common, dma->in,
dma               650 drivers/crypto/axis/artpec6_crypto.c 				sizeof(dma->in[0]) * dma->in_cnt,
dma               651 drivers/crypto/axis/artpec6_crypto.c 				DMA_TO_DEVICE, &dma->in_dma_addr);
dma               655 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_dma_map_single(common, dma->out,
dma               656 drivers/crypto/axis/artpec6_crypto.c 				sizeof(dma->out[0]) * dma->out_cnt,
dma               657 drivers/crypto/axis/artpec6_crypto.c 				DMA_TO_DEVICE, &dma->out_dma_addr);
dma               662 drivers/crypto/axis/artpec6_crypto.c 	dma->stat[dma->in_cnt - 1] = 0;
dma               669 drivers/crypto/axis/artpec6_crypto.c 				dma->stat,
dma               670 drivers/crypto/axis/artpec6_crypto.c 				sizeof(dma->stat[0]) * dma->in_cnt,
dma               672 drivers/crypto/axis/artpec6_crypto.c 				&dma->stat_dma_addr);
dma               678 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               682 drivers/crypto/axis/artpec6_crypto.c 	for (i = 0; i < dma->map_count; i++) {
dma               683 drivers/crypto/axis/artpec6_crypto.c 		struct artpec6_crypto_dma_map *map = &dma->maps[i];
dma               688 drivers/crypto/axis/artpec6_crypto.c 	dma->map_count = 0;
dma               739 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               742 drivers/crypto/axis/artpec6_crypto.c 	if (dma->in_cnt >= PDMA_DESCR_COUNT ||
dma               747 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->in[dma->in_cnt++];
dma               817 drivers/crypto/axis/artpec6_crypto.c 	list_add_tail(&bbuf->list, &common->dma->bounce_buffers);
dma               953 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               956 drivers/crypto/axis/artpec6_crypto.c 	if (!dma->out_cnt || dma->out_cnt > PDMA_DESCR_COUNT) {
dma               958 drivers/crypto/axis/artpec6_crypto.c 			MODULE_NAME, dma->out_cnt ? "empty" : "full");
dma               963 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->out[dma->out_cnt-1];
dma               977 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_dma_descriptors *dma = common->dma;
dma               980 drivers/crypto/axis/artpec6_crypto.c 	if (!dma->in_cnt || dma->in_cnt > PDMA_DESCR_COUNT) {
dma               982 drivers/crypto/axis/artpec6_crypto.c 			MODULE_NAME, dma->in_cnt ? "empty" : "full");
dma               986 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->in[dma->in_cnt-1];
dma              1050 drivers/crypto/axis/artpec6_crypto.c 	common->dma = kmem_cache_alloc(ac->dma_cache, flags);
dma              1051 drivers/crypto/axis/artpec6_crypto.c 	if (!common->dma)
dma              1060 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_bounce_destroy(struct artpec6_crypto_dma_descriptors *dma)
dma              1065 drivers/crypto/axis/artpec6_crypto.c 	list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) {
dma              1076 drivers/crypto/axis/artpec6_crypto.c 	artpec6_crypto_bounce_destroy(common->dma);
dma              1077 drivers/crypto/axis/artpec6_crypto.c 	kmem_cache_free(ac->dma_cache, common->dma);
dma              1078 drivers/crypto/axis/artpec6_crypto.c 	common->dma = NULL;
dma              2102 drivers/crypto/axis/artpec6_crypto.c 		struct artpec6_crypto_dma_descriptors *dma = req->dma;
dma              2106 drivers/crypto/axis/artpec6_crypto.c 		stataddr = dma->stat_dma_addr + 4 * (req->dma->in_cnt - 1);
dma              2112 drivers/crypto/axis/artpec6_crypto.c 		stat = req->dma->stat[req->dma->in_cnt-1];
dma              2305 drivers/crypto/axis/artpec6_crypto.c 	if (!req_ctx->common.dma) {
dma               296 drivers/crypto/caam/regs.h 	u32 dma;	/* DMA_VERSION */
dma                13 drivers/crypto/caam/sg_sw_qm.h static inline void __dma_to_qm_sg(struct qm_sg_entry *qm_sg_ptr, dma_addr_t dma,
dma                16 drivers/crypto/caam/sg_sw_qm.h 	qm_sg_entry_set64(qm_sg_ptr, dma);
dma                23 drivers/crypto/caam/sg_sw_qm.h 				    dma_addr_t dma, u32 len, u16 offset)
dma                25 drivers/crypto/caam/sg_sw_qm.h 	__dma_to_qm_sg(qm_sg_ptr, dma, offset);
dma                30 drivers/crypto/caam/sg_sw_qm.h 					 dma_addr_t dma, u32 len, u16 offset)
dma                32 drivers/crypto/caam/sg_sw_qm.h 	__dma_to_qm_sg(qm_sg_ptr, dma, offset);
dma                37 drivers/crypto/caam/sg_sw_qm.h 					dma_addr_t dma, u32 len, u16 offset)
dma                39 drivers/crypto/caam/sg_sw_qm.h 	__dma_to_qm_sg(qm_sg_ptr, dma, offset);
dma                44 drivers/crypto/caam/sg_sw_qm.h 					     dma_addr_t dma, u32 len,
dma                47 drivers/crypto/caam/sg_sw_qm.h 	__dma_to_qm_sg(qm_sg_ptr, dma, offset);
dma                13 drivers/crypto/caam/sg_sw_qm2.h 				    dma_addr_t dma, u32 len, u16 offset)
dma                15 drivers/crypto/caam/sg_sw_qm2.h 	dpaa2_sg_set_addr(qm_sg_ptr, dma);
dma                27 drivers/crypto/caam/sg_sw_sec4.h 				      dma_addr_t dma, u32 len, u16 offset)
dma                30 drivers/crypto/caam/sg_sw_sec4.h 		dma_to_qm_sg_one((struct dpaa2_sg_entry *)sec4_sg_ptr, dma, len,
dma                33 drivers/crypto/caam/sg_sw_sec4.h 		sec4_sg_ptr->ptr = cpu_to_caam_dma64(dma);
dma                51 drivers/crypto/cavium/nitrox/nitrox_dev.h 	dma_addr_t dma;
dma               137 drivers/crypto/cavium/nitrox/nitrox_hal.c 		nitrox_write_csr(ndev, offset, cmdq->dma);
dma               379 drivers/crypto/cavium/nitrox/nitrox_hal.c 		nitrox_write_csr(ndev, offset, cmdq->dma);
dma                36 drivers/crypto/cavium/nitrox/nitrox_lib.c 	cmdq->dma = PTR_ALIGN(cmdq->unalign_dma, align_bytes);
dma                37 drivers/crypto/cavium/nitrox/nitrox_lib.c 	cmdq->base = cmdq->unalign_base + (cmdq->dma - cmdq->unalign_dma);
dma                82 drivers/crypto/cavium/nitrox/nitrox_lib.c 	cmdq->dma = 0;
dma               220 drivers/crypto/cavium/nitrox/nitrox_lib.c 	dma_addr_t dma;
dma               226 drivers/crypto/cavium/nitrox/nitrox_lib.c 	vaddr = dma_pool_zalloc(ndev->ctx_pool, GFP_KERNEL, &dma);
dma               235 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctx->dma = dma;
dma               236 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctx->ctx_dma = dma + sizeof(struct ctx_hdr);
dma               239 drivers/crypto/cavium/nitrox/nitrox_lib.c 	chdr->dma = dma;
dma               257 drivers/crypto/cavium/nitrox/nitrox_lib.c 	dma_pool_free(ctxp->pool, ctxp->vaddr, ctxp->dma);
dma               195 drivers/crypto/cavium/nitrox/nitrox_req.h 	dma_addr_t dma;
dma               440 drivers/crypto/cavium/nitrox/nitrox_req.h 	dma_addr_t dma;
dma               457 drivers/crypto/cavium/nitrox/nitrox_req.h 	__be64 dma[4];
dma               111 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	dma_addr_t dma;
dma               130 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 			sgcomp[i].dma[j] = cpu_to_be64(sg_dma_address(sg));
dma               135 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	dma = dma_map_single(DEV(ndev), sgtbl->sgcomp, sz_comp, DMA_TO_DEVICE);
dma               136 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	if (dma_mapping_error(DEV(ndev), dma)) {
dma               142 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sgtbl->sgcomp_dma = dma;
dma               158 drivers/crypto/ccp/ccp-dev-v3.c 	cr[1] = op->src.u.dma.length - 1;
dma               159 drivers/crypto/ccp/ccp-dev-v3.c 	cr[2] = ccp_addr_lo(&op->src.u.dma);
dma               162 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->src.u.dma);
dma               163 drivers/crypto/ccp/ccp-dev-v3.c 	cr[4] = ccp_addr_lo(&op->dst.u.dma);
dma               165 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->dst.u.dma);
dma               188 drivers/crypto/ccp/ccp-dev-v3.c 	cr[1] = op->src.u.dma.length - 1;
dma               189 drivers/crypto/ccp/ccp-dev-v3.c 	cr[2] = ccp_addr_lo(&op->src.u.dma);
dma               192 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->src.u.dma);
dma               193 drivers/crypto/ccp/ccp-dev-v3.c 	cr[4] = ccp_addr_lo(&op->dst.u.dma);
dma               195 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->dst.u.dma);
dma               214 drivers/crypto/ccp/ccp-dev-v3.c 	cr[1] = op->src.u.dma.length - 1;
dma               215 drivers/crypto/ccp/ccp-dev-v3.c 	cr[2] = ccp_addr_lo(&op->src.u.dma);
dma               218 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->src.u.dma);
dma               242 drivers/crypto/ccp/ccp-dev-v3.c 	cr[2] = ccp_addr_lo(&op->src.u.dma);
dma               245 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->src.u.dma);
dma               246 drivers/crypto/ccp/ccp-dev-v3.c 	cr[4] = ccp_addr_lo(&op->dst.u.dma);
dma               248 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->dst.u.dma);
dma               263 drivers/crypto/ccp/ccp-dev-v3.c 		cr[1] = op->src.u.dma.length - 1;
dma               265 drivers/crypto/ccp/ccp-dev-v3.c 		cr[1] = op->dst.u.dma.length - 1;
dma               268 drivers/crypto/ccp/ccp-dev-v3.c 		cr[2] = ccp_addr_lo(&op->src.u.dma);
dma               270 drivers/crypto/ccp/ccp-dev-v3.c 			| ccp_addr_hi(&op->src.u.dma);
dma               280 drivers/crypto/ccp/ccp-dev-v3.c 		cr[4] = ccp_addr_lo(&op->dst.u.dma);
dma               282 drivers/crypto/ccp/ccp-dev-v3.c 			| ccp_addr_hi(&op->dst.u.dma);
dma               303 drivers/crypto/ccp/ccp-dev-v3.c 	cr[1] = op->src.u.dma.length - 1;
dma               304 drivers/crypto/ccp/ccp-dev-v3.c 	cr[2] = ccp_addr_lo(&op->src.u.dma);
dma               306 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->src.u.dma);
dma               307 drivers/crypto/ccp/ccp-dev-v3.c 	cr[4] = ccp_addr_lo(&op->dst.u.dma);
dma               309 drivers/crypto/ccp/ccp-dev-v3.c 		| ccp_addr_hi(&op->dst.u.dma);
dma               304 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_LEN(&desc) = op->src.u.dma.length;
dma               306 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               307 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               310 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma);
dma               311 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma);
dma               347 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_LEN(&desc) = op->src.u.dma.length;
dma               349 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               350 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               353 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma);
dma               354 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma);
dma               387 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_LEN(&desc) = op->src.u.dma.length;
dma               389 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               390 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               431 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_LEN(&desc) = op->src.u.dma.length;
dma               433 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               434 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               437 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma);
dma               438 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma);
dma               474 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               475 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               479 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma);
dma               480 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma);
dma               484 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_KEY_LO(&desc) = ccp_addr_lo(&op->exp.u.dma);
dma               485 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_KEY_HI(&desc) = ccp_addr_hi(&op->exp.u.dma);
dma               495 drivers/crypto/ccp/ccp-dev-v5.c 	struct ccp_dma_info *saddr = &op->src.u.dma;
dma               496 drivers/crypto/ccp/ccp-dev-v5.c 	struct ccp_dma_info *daddr = &op->dst.u.dma;
dma               523 drivers/crypto/ccp/ccp-dev-v5.c 		CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               524 drivers/crypto/ccp/ccp-dev-v5.c 		CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               538 drivers/crypto/ccp/ccp-dev-v5.c 		CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma);
dma               539 drivers/crypto/ccp/ccp-dev-v5.c 		CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma);
dma               574 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_LEN(&desc) = op->src.u.dma.length;
dma               576 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_LO(&desc) = ccp_addr_lo(&op->src.u.dma);
dma               577 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_SRC_HI(&desc) = ccp_addr_hi(&op->src.u.dma);
dma               580 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma);
dma               581 drivers/crypto/ccp/ccp-dev-v5.c 	CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma);
dma               462 drivers/crypto/ccp/ccp-dev.h 	struct ccp_dma_info dma;
dma               487 drivers/crypto/ccp/ccp-dev.h 		struct ccp_dma_info dma;
dma               124 drivers/crypto/ccp/ccp-ops.c 				      wa->dma.address);
dma               126 drivers/crypto/ccp/ccp-ops.c 		if (wa->dma.address)
dma               127 drivers/crypto/ccp/ccp-ops.c 			dma_unmap_single(wa->dev, wa->dma.address, wa->length,
dma               128 drivers/crypto/ccp/ccp-ops.c 					 wa->dma.dir);
dma               133 drivers/crypto/ccp/ccp-ops.c 	wa->dma.address = 0;
dma               153 drivers/crypto/ccp/ccp-ops.c 					     &wa->dma.address);
dma               157 drivers/crypto/ccp/ccp-ops.c 		wa->dma.length = CCP_DMAPOOL_MAX_SIZE;
dma               164 drivers/crypto/ccp/ccp-ops.c 		wa->dma.address = dma_map_single(wa->dev, wa->address, len,
dma               166 drivers/crypto/ccp/ccp-ops.c 		if (dma_mapping_error(wa->dev, wa->dma.address))
dma               169 drivers/crypto/ccp/ccp-ops.c 		wa->dma.length = len;
dma               171 drivers/crypto/ccp/ccp-ops.c 	wa->dma.dir = dir;
dma               361 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.address = src->dm_wa.dma.address;
dma               362 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.offset = 0;
dma               363 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.length = (blocksize_op) ? block_size : cp_len;
dma               368 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.address = sg_dma_address(src->sg_wa.sg);
dma               369 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.offset = src->sg_wa.sg_used;
dma               370 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.length = op_len & ~(block_size - 1);
dma               372 drivers/crypto/ccp/ccp-ops.c 		ccp_update_sg_workarea(&src->sg_wa, op->src.u.dma.length);
dma               382 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.address = dst->dm_wa.dma.address;
dma               383 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.offset = 0;
dma               384 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.length = op->src.u.dma.length;
dma               389 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.address = sg_dma_address(dst->sg_wa.sg);
dma               390 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.offset = dst->sg_wa.sg_used;
dma               391 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.length = op->src.u.dma.length;
dma               402 drivers/crypto/ccp/ccp-ops.c 		if (op->dst.u.dma.address == dst->dm_wa.dma.address)
dma               406 drivers/crypto/ccp/ccp-ops.c 					       op->dst.u.dma.length);
dma               427 drivers/crypto/ccp/ccp-ops.c 		op.dst.u.dma.address = wa->dma.address;
dma               428 drivers/crypto/ccp/ccp-ops.c 		op.dst.u.dma.length = wa->length;
dma               431 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.address = wa->dma.address;
dma               432 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.length = wa->length;
dma               842 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.address = final_wa.dma.address;
dma               843 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.length = AES_BLOCK_SIZE;
dma               845 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.address = final_wa.dma.address;
dma               846 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.length = AES_BLOCK_SIZE;
dma              1887 drivers/crypto/ccp/ccp-ops.c 		op.exp.u.dma.address = exp.dma.address;
dma              1888 drivers/crypto/ccp/ccp-ops.c 		op.exp.u.dma.offset = 0;
dma              1912 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.address = src.dma.address;
dma              1913 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.offset = 0;
dma              1914 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.length = i_len;
dma              1915 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.address = dst.dma.address;
dma              1916 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.offset = 0;
dma              1917 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.length = o_len;
dma              2041 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.address = sg_dma_address(src.sg_wa.sg);
dma              2042 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.offset = 0;
dma              2043 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.length = sg_dma_len(src.sg_wa.sg);
dma              2046 drivers/crypto/ccp/ccp-ops.c 		op.dst.u.dma.address = sg_dma_address(dst.sg_wa.sg);
dma              2047 drivers/crypto/ccp/ccp-ops.c 		op.dst.u.dma.offset = dst.sg_wa.sg_used;
dma              2048 drivers/crypto/ccp/ccp-ops.c 		op.dst.u.dma.length = op.src.u.dma.length;
dma              2111 drivers/crypto/ccp/ccp-ops.c 		mask.dma.address = pt->mask;
dma              2112 drivers/crypto/ccp/ccp-ops.c 		mask.dma.length = pt->mask_len;
dma              2127 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.address = pt->src_dma;
dma              2128 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.offset = 0;
dma              2129 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.length = pt->src_len;
dma              2132 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.address = pt->dst_dma;
dma              2133 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.offset = 0;
dma              2134 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.length = pt->src_len;
dma              2215 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.address = src.dma.address;
dma              2216 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.offset = 0;
dma              2217 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.length = src.length;
dma              2218 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.address = dst.dma.address;
dma              2219 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.offset = 0;
dma              2220 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.length = dst.length;
dma              2373 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.address = src.dma.address;
dma              2374 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.offset = 0;
dma              2375 drivers/crypto/ccp/ccp-ops.c 	op.src.u.dma.length = src.length;
dma              2376 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.address = dst.dma.address;
dma              2377 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.offset = 0;
dma              2378 drivers/crypto/ccp/ccp-ops.c 	op.dst.u.dma.length = dst.length;
dma               881 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma               886 drivers/crypto/hifn_795x.c 		dma->cmdr[i].p = __cpu_to_le32(dptr +
dma               889 drivers/crypto/hifn_795x.c 		dma->resr[i].p = __cpu_to_le32(dptr +
dma               893 drivers/crypto/hifn_795x.c 	dma->cmdr[HIFN_D_CMD_RSIZE].p = __cpu_to_le32(dptr +
dma               895 drivers/crypto/hifn_795x.c 	dma->srcr[HIFN_D_SRC_RSIZE].p = __cpu_to_le32(dptr +
dma               897 drivers/crypto/hifn_795x.c 	dma->dstr[HIFN_D_DST_RSIZE].p = __cpu_to_le32(dptr +
dma               899 drivers/crypto/hifn_795x.c 	dma->resr[HIFN_D_RES_RSIZE].p = __cpu_to_le32(dptr +
dma               902 drivers/crypto/hifn_795x.c 	dma->cmdu = dma->srcu = dma->dstu = dma->resu = 0;
dma               903 drivers/crypto/hifn_795x.c 	dma->cmdi = dma->srci = dma->dsti = dma->resi = 0;
dma               904 drivers/crypto/hifn_795x.c 	dma->cmdk = dma->srck = dma->dstk = dma->resk = 0;
dma              1074 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1091 drivers/crypto/hifn_795x.c 	dma->cmdu++;
dma              1092 drivers/crypto/hifn_795x.c 	if (dma->cmdu > 1) {
dma              1115 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1120 drivers/crypto/hifn_795x.c 	sa_idx = dma->cmdi;
dma              1121 drivers/crypto/hifn_795x.c 	buf_pos = buf = dma->command_bufs[dma->cmdi];
dma              1208 drivers/crypto/hifn_795x.c 	dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID |
dma              1211 drivers/crypto/hifn_795x.c 	if (++dma->cmdi == HIFN_D_CMD_RSIZE) {
dma              1212 drivers/crypto/hifn_795x.c 		dma->cmdr[dma->cmdi].l = __cpu_to_le32(
dma              1215 drivers/crypto/hifn_795x.c 		dma->cmdi = 0;
dma              1217 drivers/crypto/hifn_795x.c 		dma->cmdr[dma->cmdi - 1].l |= __cpu_to_le32(HIFN_D_VALID);
dma              1233 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1239 drivers/crypto/hifn_795x.c 	idx = dma->srci;
dma              1241 drivers/crypto/hifn_795x.c 	dma->srcr[idx].p = __cpu_to_le32(addr);
dma              1242 drivers/crypto/hifn_795x.c 	dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
dma              1246 drivers/crypto/hifn_795x.c 		dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID |
dma              1252 drivers/crypto/hifn_795x.c 	dma->srci = idx;
dma              1253 drivers/crypto/hifn_795x.c 	dma->srcu++;
dma              1265 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1267 drivers/crypto/hifn_795x.c 	dma->resr[dma->resi].l = __cpu_to_le32(HIFN_USED_RESULT |
dma              1274 drivers/crypto/hifn_795x.c 	if (++dma->resi == HIFN_D_RES_RSIZE) {
dma              1275 drivers/crypto/hifn_795x.c 		dma->resr[HIFN_D_RES_RSIZE].l = __cpu_to_le32(HIFN_D_VALID |
dma              1277 drivers/crypto/hifn_795x.c 		dma->resi = 0;
dma              1280 drivers/crypto/hifn_795x.c 	dma->resu++;
dma              1291 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1297 drivers/crypto/hifn_795x.c 	idx = dma->dsti;
dma              1298 drivers/crypto/hifn_795x.c 	dma->dstr[idx].p = __cpu_to_le32(addr);
dma              1299 drivers/crypto/hifn_795x.c 	dma->dstr[idx].l = __cpu_to_le32(size |	HIFN_D_VALID |
dma              1303 drivers/crypto/hifn_795x.c 		dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID |
dma              1308 drivers/crypto/hifn_795x.c 	dma->dsti = idx;
dma              1309 drivers/crypto/hifn_795x.c 	dma->dstu++;
dma              1710 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1715 drivers/crypto/hifn_795x.c 			dma->cmdi, dma->srci, dma->dsti, dma->resi,
dma              1716 drivers/crypto/hifn_795x.c 			dma->cmdu, dma->srcu, dma->dstu, dma->resu,
dma              1717 drivers/crypto/hifn_795x.c 			dma->cmdk, dma->srck, dma->dstk, dma->resk);
dma              1719 drivers/crypto/hifn_795x.c 	i = dma->resk; u = dma->resu;
dma              1721 drivers/crypto/hifn_795x.c 		if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID))
dma              1735 drivers/crypto/hifn_795x.c 	dma->resk = i; dma->resu = u;
dma              1737 drivers/crypto/hifn_795x.c 	i = dma->srck; u = dma->srcu;
dma              1739 drivers/crypto/hifn_795x.c 		if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID))
dma              1745 drivers/crypto/hifn_795x.c 	dma->srck = i; dma->srcu = u;
dma              1747 drivers/crypto/hifn_795x.c 	i = dma->cmdk; u = dma->cmdu;
dma              1749 drivers/crypto/hifn_795x.c 		if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID))
dma              1755 drivers/crypto/hifn_795x.c 	dma->cmdk = i; dma->cmdu = u;
dma              1757 drivers/crypto/hifn_795x.c 	i = dma->dstk; u = dma->dstu;
dma              1759 drivers/crypto/hifn_795x.c 		if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID))
dma              1765 drivers/crypto/hifn_795x.c 	dma->dstk = i; dma->dstu = u;
dma              1769 drivers/crypto/hifn_795x.c 			dma->cmdi, dma->srci, dma->dsti, dma->resi,
dma              1770 drivers/crypto/hifn_795x.c 			dma->cmdu, dma->srcu, dma->dstu, dma->resu,
dma              1771 drivers/crypto/hifn_795x.c 			dma->cmdk, dma->srck, dma->dstk, dma->resk);
dma              1784 drivers/crypto/hifn_795x.c 		struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1786 drivers/crypto/hifn_795x.c 		if (dma->cmdu == 0 && (dev->flags & HIFN_FLAG_CMD_BUSY)) {
dma              1790 drivers/crypto/hifn_795x.c 		if (dma->srcu == 0 && (dev->flags & HIFN_FLAG_SRC_BUSY)) {
dma              1794 drivers/crypto/hifn_795x.c 		if (dma->dstu == 0 && (dev->flags & HIFN_FLAG_DST_BUSY)) {
dma              1798 drivers/crypto/hifn_795x.c 		if (dma->resu == 0 && (dev->flags & HIFN_FLAG_RES_BUSY)) {
dma              1815 drivers/crypto/hifn_795x.c 			struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1826 drivers/crypto/hifn_795x.c 				pr_info("%x.%p ", dma->resr[i].l, dev->sa[i]);
dma              1849 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1856 drivers/crypto/hifn_795x.c 		dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi,
dma              1857 drivers/crypto/hifn_795x.c 		dma->cmdi, dma->srci, dma->dsti, dma->resi,
dma              1858 drivers/crypto/hifn_795x.c 		dma->cmdu, dma->srcu, dma->dstu, dma->resu);
dma              1898 drivers/crypto/hifn_795x.c 	if ((dmacsr & HIFN_DMACSR_C_WAIT) && (dma->cmdu == 0)) {
dma              1914 drivers/crypto/hifn_795x.c 	struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
dma              1918 drivers/crypto/hifn_795x.c 		struct hifn_desc *d = &dma->resr[i];
dma              1135 drivers/crypto/hisilicon/qm.c 						 &qp->qdma.dma, GFP_KERNEL);
dma              1142 drivers/crypto/hisilicon/qm.c 			qp->qdma.va, &qp->qdma.dma, qp->qdma.size);
dma              1160 drivers/crypto/hisilicon/qm.c 				  qp->qdma.dma);
dma              1185 drivers/crypto/hisilicon/qm.c 		dma_free_coherent(dev, qdma->size, qdma->va, qdma->dma);
dma              1283 drivers/crypto/hisilicon/qm.c 	(qp)->type##_dma = (qp)->qdma.dma + (off); \
dma              1287 drivers/crypto/hisilicon/qm.c 	if (!qp->qdma.dma) {
dma              1293 drivers/crypto/hisilicon/qm.c 	if (qp->qdma.dma & QM_SQE_DATA_ALIGN_MASK) {
dma              1492 drivers/crypto/hisilicon/qm.c 				  qm->qdma.va, qm->qdma.dma);
dma              1629 drivers/crypto/hisilicon/qm.c 	(qm)->type##_dma = (qm)->qdma.dma + (off); \
dma              1633 drivers/crypto/hisilicon/qm.c 	WARN_ON(!qm->qdma.dma);
dma              1717 drivers/crypto/hisilicon/qm.c 						 &qm->qdma.dma, GFP_KERNEL);
dma              1719 drivers/crypto/hisilicon/qm.c 			qm->qdma.va, &qm->qdma.dma, qm->qdma.size);
dma               114 drivers/crypto/hisilicon/qm.h 	dma_addr_t dma;
dma               163 drivers/crypto/img-hash.c static void img_hash_start(struct img_hash_dev *hdev, bool dma)
dma               185 drivers/crypto/img-hash.c 	if (!dma)
dma               340 drivers/crypto/marvell/cesa.c 	struct mv_cesa_dev_dma *dma;
dma               345 drivers/crypto/marvell/cesa.c 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
dma               346 drivers/crypto/marvell/cesa.c 	if (!dma)
dma               349 drivers/crypto/marvell/cesa.c 	dma->tdma_desc_pool = dmam_pool_create("tdma_desc", dev,
dma               352 drivers/crypto/marvell/cesa.c 	if (!dma->tdma_desc_pool)
dma               355 drivers/crypto/marvell/cesa.c 	dma->op_pool = dmam_pool_create("cesa_op", dev,
dma               357 drivers/crypto/marvell/cesa.c 	if (!dma->op_pool)
dma               360 drivers/crypto/marvell/cesa.c 	dma->cache_pool = dmam_pool_create("cesa_cache", dev,
dma               362 drivers/crypto/marvell/cesa.c 	if (!dma->cache_pool)
dma               365 drivers/crypto/marvell/cesa.c 	dma->padding_pool = dmam_pool_create("cesa_padding", dev, 72, 1, 0);
dma               366 drivers/crypto/marvell/cesa.c 	if (!dma->padding_pool)
dma               369 drivers/crypto/marvell/cesa.c 	cesa->dma = dma;
dma               419 drivers/crypto/marvell/cesa.h 	struct mv_cesa_dev_dma *dma;
dma               603 drivers/crypto/marvell/cesa.h 		struct mv_cesa_ahash_dma_req dma;
dma                49 drivers/crypto/marvell/hash.c 	req->cache = dma_pool_alloc(cesa_dev->dma->cache_pool, flags,
dma                63 drivers/crypto/marvell/hash.c 	dma_pool_free(cesa_dev->dma->cache_pool, req->cache,
dma                73 drivers/crypto/marvell/hash.c 	req->padding = dma_pool_alloc(cesa_dev->dma->padding_pool, flags,
dma                86 drivers/crypto/marvell/hash.c 	dma_pool_free(cesa_dev->dma->padding_pool, req->padding,
dma                95 drivers/crypto/marvell/hash.c 	mv_cesa_ahash_dma_free_padding(&creq->req.dma);
dma               103 drivers/crypto/marvell/hash.c 	mv_cesa_ahash_dma_free_cache(&creq->req.dma);
dma               491 drivers/crypto/marvell/hash.c 	struct mv_cesa_ahash_dma_req *ahashdreq = &creq->req.dma;
dma               517 drivers/crypto/marvell/hash.c 	struct mv_cesa_ahash_dma_req *ahashdreq = &creq->req.dma;
dma                67 drivers/crypto/marvell/tdma.c 			dma_pool_free(cesa_dev->dma->op_pool, tdma->op,
dma                71 drivers/crypto/marvell/tdma.c 		dma_pool_free(cesa_dev->dma->tdma_desc_pool, old_tdma,
dma               195 drivers/crypto/marvell/tdma.c 	new_tdma = dma_pool_zalloc(cesa_dev->dma->tdma_desc_pool, flags,
dma               261 drivers/crypto/marvell/tdma.c 	op = dma_pool_alloc(cesa_dev->dma->op_pool, flags, &dma_handle);
dma               192 drivers/crypto/mediatek/mtk-aes.c 				  struct mtk_aes_dma *dma)
dma               207 drivers/crypto/mediatek/mtk-aes.c 			dma->nents = nents + 1;
dma               208 drivers/crypto/mediatek/mtk-aes.c 			dma->remainder = sg->length - len;
dma               229 drivers/crypto/mediatek/mtk-aes.c static inline void mtk_aes_restore_sg(const struct mtk_aes_dma *dma)
dma               231 drivers/crypto/mediatek/mtk-aes.c 	struct scatterlist *sg = dma->sg;
dma               232 drivers/crypto/mediatek/mtk-aes.c 	int nents = dma->nents;
dma               234 drivers/crypto/mediatek/mtk-aes.c 	if (!dma->remainder)
dma               243 drivers/crypto/mediatek/mtk-aes.c 	sg->length += dma->remainder;
dma               194 drivers/crypto/omap-sham.c 				      int final, int dma);
dma               384 drivers/crypto/omap-sham.c 				 int final, int dma)
dma               393 drivers/crypto/omap-sham.c 		SHA_REG_MASK_IT_EN | (dma ? SHA_REG_MASK_DMA_EN : 0),
dma               453 drivers/crypto/omap-sham.c 				 int final, int dma)
dma               500 drivers/crypto/omap-sham.c 				     (dma ? SHA_REG_MASK_DMA_EN : 0),
dma                33 drivers/crypto/qce/ablkcipher.c 	error = qce_dma_terminate_all(&qce->dma);
dma                96 drivers/crypto/qce/ablkcipher.c 	sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
dma               126 drivers/crypto/qce/ablkcipher.c 	ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents,
dma               132 drivers/crypto/qce/ablkcipher.c 	qce_dma_issue_pending(&qce->dma);
dma               141 drivers/crypto/qce/ablkcipher.c 	qce_dma_terminate_all(&qce->dma);
dma               211 drivers/crypto/qce/core.c 	ret = qce_dma_request(qce->dev, &qce->dma);
dma               234 drivers/crypto/qce/core.c 	qce_dma_release(&qce->dma);
dma               250 drivers/crypto/qce/core.c 	qce_dma_release(&qce->dma);
dma                38 drivers/crypto/qce/core.h 	struct qce_dma_data dma;
dma                11 drivers/crypto/qce/dma.c int qce_dma_request(struct device *dev, struct qce_dma_data *dma)
dma                15 drivers/crypto/qce/dma.c 	dma->txchan = dma_request_slave_channel_reason(dev, "tx");
dma                16 drivers/crypto/qce/dma.c 	if (IS_ERR(dma->txchan))
dma                17 drivers/crypto/qce/dma.c 		return PTR_ERR(dma->txchan);
dma                19 drivers/crypto/qce/dma.c 	dma->rxchan = dma_request_slave_channel_reason(dev, "rx");
dma                20 drivers/crypto/qce/dma.c 	if (IS_ERR(dma->rxchan)) {
dma                21 drivers/crypto/qce/dma.c 		ret = PTR_ERR(dma->rxchan);
dma                25 drivers/crypto/qce/dma.c 	dma->result_buf = kmalloc(QCE_RESULT_BUF_SZ + QCE_IGNORE_BUF_SZ,
dma                27 drivers/crypto/qce/dma.c 	if (!dma->result_buf) {
dma                32 drivers/crypto/qce/dma.c 	dma->ignore_buf = dma->result_buf + QCE_RESULT_BUF_SZ;
dma                36 drivers/crypto/qce/dma.c 	dma_release_channel(dma->rxchan);
dma                38 drivers/crypto/qce/dma.c 	dma_release_channel(dma->txchan);
dma                42 drivers/crypto/qce/dma.c void qce_dma_release(struct qce_dma_data *dma)
dma                44 drivers/crypto/qce/dma.c 	dma_release_channel(dma->txchan);
dma                45 drivers/crypto/qce/dma.c 	dma_release_channel(dma->rxchan);
dma                46 drivers/crypto/qce/dma.c 	kfree(dma->result_buf);
dma                96 drivers/crypto/qce/dma.c int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *rx_sg,
dma               100 drivers/crypto/qce/dma.c 	struct dma_chan *rxchan = dma->rxchan;
dma               101 drivers/crypto/qce/dma.c 	struct dma_chan *txchan = dma->txchan;
dma               114 drivers/crypto/qce/dma.c void qce_dma_issue_pending(struct qce_dma_data *dma)
dma               116 drivers/crypto/qce/dma.c 	dma_async_issue_pending(dma->rxchan);
dma               117 drivers/crypto/qce/dma.c 	dma_async_issue_pending(dma->txchan);
dma               120 drivers/crypto/qce/dma.c int qce_dma_terminate_all(struct qce_dma_data *dma)
dma               124 drivers/crypto/qce/dma.c 	ret = dmaengine_terminate_all(dma->rxchan);
dma               125 drivers/crypto/qce/dma.c 	return ret ?: dmaengine_terminate_all(dma->txchan);
dma                37 drivers/crypto/qce/dma.h int qce_dma_request(struct device *dev, struct qce_dma_data *dma);
dma                38 drivers/crypto/qce/dma.h void qce_dma_release(struct qce_dma_data *dma);
dma                39 drivers/crypto/qce/dma.h int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *sg_in,
dma                42 drivers/crypto/qce/dma.h void qce_dma_issue_pending(struct qce_dma_data *dma);
dma                43 drivers/crypto/qce/dma.h int qce_dma_terminate_all(struct qce_dma_data *dma);
dma                37 drivers/crypto/qce/sha.c 	struct qce_result_dump *result = qce->dma.result_buf;
dma                42 drivers/crypto/qce/sha.c 	error = qce_dma_terminate_all(&qce->dma);
dma                96 drivers/crypto/qce/sha.c 	sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
dma               102 drivers/crypto/qce/sha.c 	ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,
dma               107 drivers/crypto/qce/sha.c 	qce_dma_issue_pending(&qce->dma);
dma               116 drivers/crypto/qce/sha.c 	qce_dma_terminate_all(&qce->dma);
dma              1933 drivers/crypto/talitos.c 	dma_addr_t dma;
dma              1945 drivers/crypto/talitos.c 	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
dma              1947 drivers/crypto/talitos.c 	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
dma              2136 drivers/crypto/talitos.c 	dma_addr_t dma;
dma              2138 drivers/crypto/talitos.c 	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
dma              2140 drivers/crypto/talitos.c 	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
dma              2162 drivers/crypto/talitos.c 	dma_addr_t dma;
dma              2177 drivers/crypto/talitos.c 	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
dma              2179 drivers/crypto/talitos.c 	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
dma               247 drivers/crypto/ux500/cryp/cryp.h 	struct cryp_dma dma;
dma               491 drivers/crypto/ux500/cryp/cryp_core.c 	dma_cap_zero(device_data->dma.mask);
dma               492 drivers/crypto/ux500/cryp/cryp_core.c 	dma_cap_set(DMA_SLAVE, device_data->dma.mask);
dma               494 drivers/crypto/ux500/cryp/cryp_core.c 	device_data->dma.cfg_mem2cryp = mem_to_engine;
dma               495 drivers/crypto/ux500/cryp/cryp_core.c 	device_data->dma.chan_mem2cryp =
dma               496 drivers/crypto/ux500/cryp/cryp_core.c 		dma_request_channel(device_data->dma.mask,
dma               498 drivers/crypto/ux500/cryp/cryp_core.c 				    device_data->dma.cfg_mem2cryp);
dma               500 drivers/crypto/ux500/cryp/cryp_core.c 	device_data->dma.cfg_cryp2mem = engine_to_mem;
dma               501 drivers/crypto/ux500/cryp/cryp_core.c 	device_data->dma.chan_cryp2mem =
dma               502 drivers/crypto/ux500/cryp/cryp_core.c 		dma_request_channel(device_data->dma.mask,
dma               504 drivers/crypto/ux500/cryp/cryp_core.c 				    device_data->dma.cfg_cryp2mem);
dma               506 drivers/crypto/ux500/cryp/cryp_core.c 	dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
dma               507 drivers/crypto/ux500/cryp/cryp_core.c 	dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);
dma               509 drivers/crypto/ux500/cryp/cryp_core.c 	init_completion(&device_data->dma.cryp_dma_complete);
dma               517 drivers/crypto/ux500/cryp/cryp_core.c 	complete(&ctx->device->dma.cryp_dma_complete);
dma               539 drivers/crypto/ux500/cryp/cryp_core.c 		channel = ctx->device->dma.chan_mem2cryp;
dma               540 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_src = sg;
dma               541 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
dma               542 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.sg_src,
dma               543 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.nents_src,
dma               546 drivers/crypto/ux500/cryp/cryp_core.c 		if (!ctx->device->dma.sg_src_len) {
dma               557 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_src,
dma               558 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_src_len,
dma               563 drivers/crypto/ux500/cryp/cryp_core.c 		channel = ctx->device->dma.chan_cryp2mem;
dma               564 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_dst = sg;
dma               565 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
dma               566 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.sg_dst,
dma               567 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.nents_dst,
dma               570 drivers/crypto/ux500/cryp/cryp_core.c 		if (!ctx->device->dma.sg_dst_len) {
dma               581 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_dst,
dma               582 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_dst_len,
dma               615 drivers/crypto/ux500/cryp/cryp_core.c 	chan = ctx->device->dma.chan_mem2cryp;
dma               617 drivers/crypto/ux500/cryp/cryp_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
dma               618 drivers/crypto/ux500/cryp/cryp_core.c 		     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
dma               620 drivers/crypto/ux500/cryp/cryp_core.c 	chan = ctx->device->dma.chan_cryp2mem;
dma               622 drivers/crypto/ux500/cryp/cryp_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
dma               623 drivers/crypto/ux500/cryp/cryp_core.c 		     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
dma               855 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
dma               856 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);
dma               864 drivers/crypto/ux500/cryp/cryp_core.c 	wait_for_completion(&ctx->device->dma.cryp_dma_complete);
dma               377 drivers/crypto/ux500/hash/hash_alg.h 	struct hash_dma		dma;
dma               117 drivers/crypto/ux500/hash/hash_core.c 	dma_cap_zero(device_data->dma.mask);
dma               118 drivers/crypto/ux500/hash/hash_core.c 	dma_cap_set(DMA_SLAVE, device_data->dma.mask);
dma               120 drivers/crypto/ux500/hash/hash_core.c 	device_data->dma.cfg_mem2hash = platform_data->mem_to_engine;
dma               121 drivers/crypto/ux500/hash/hash_core.c 	device_data->dma.chan_mem2hash =
dma               122 drivers/crypto/ux500/hash/hash_core.c 		dma_request_channel(device_data->dma.mask,
dma               124 drivers/crypto/ux500/hash/hash_core.c 				    device_data->dma.cfg_mem2hash);
dma               126 drivers/crypto/ux500/hash/hash_core.c 	dmaengine_slave_config(device_data->dma.chan_mem2hash, &conf);
dma               128 drivers/crypto/ux500/hash/hash_core.c 	init_completion(&device_data->dma.complete);
dma               135 drivers/crypto/ux500/hash/hash_core.c 	complete(&ctx->device->dma.complete);
dma               153 drivers/crypto/ux500/hash/hash_core.c 	channel = ctx->device->dma.chan_mem2hash;
dma               154 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.sg = sg;
dma               155 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.sg_len = dma_map_sg(channel->device->dev,
dma               156 drivers/crypto/ux500/hash/hash_core.c 			ctx->device->dma.sg, ctx->device->dma.nents,
dma               159 drivers/crypto/ux500/hash/hash_core.c 	if (!ctx->device->dma.sg_len) {
dma               168 drivers/crypto/ux500/hash/hash_core.c 			ctx->device->dma.sg, ctx->device->dma.sg_len,
dma               189 drivers/crypto/ux500/hash/hash_core.c 	chan = ctx->device->dma.chan_mem2hash;
dma               191 drivers/crypto/ux500/hash/hash_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg,
dma               192 drivers/crypto/ux500/hash/hash_core.c 		     ctx->device->dma.sg_len, DMA_TO_DEVICE);
dma               911 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.nents = hash_get_nents(req->src, req->nbytes, NULL);
dma               912 drivers/crypto/ux500/hash/hash_core.c 	if (!ctx->device->dma.nents) {
dma               915 drivers/crypto/ux500/hash/hash_core.c 		ret = ctx->device->dma.nents;
dma               927 drivers/crypto/ux500/hash/hash_core.c 	wait_for_completion(&ctx->device->dma.complete);
dma                 3 drivers/dma-buf/sync_trace.h #define TRACE_INCLUDE_PATH ../../drivers/dma-buf
dma               336 drivers/dma/acpi-dma.c 		struct acpi_resource_fixed_dma *dma = &res->data.fixed_dma;
dma               339 drivers/dma/acpi-dma.c 			pdata->dma_spec.chan_id = dma->channels;
dma               340 drivers/dma/acpi-dma.c 			pdata->dma_spec.slave_id = dma->request_lines;
dma               209 drivers/dma/at_xdmac.c 	struct dma_device	dma;
dma               271 drivers/dma/at_xdmac.c 	return container_of(ddev, struct at_xdmac, dma);
dma               517 drivers/dma/at_xdmac.c 	struct device		*dev = atxdmac->dma.dev;
dma               524 drivers/dma/at_xdmac.c 	chan = dma_get_any_slave_channel(&atxdmac->dma);
dma              1684 drivers/dma/at_xdmac.c 		dev_vdbg(atxdmac->dma.dev,
dma              1692 drivers/dma/at_xdmac.c 		for (i = 0; i < atxdmac->dma.chancnt; i++) {
dma              1700 drivers/dma/at_xdmac.c 			dev_vdbg(atxdmac->dma.dev,
dma              1883 drivers/dma/at_xdmac.c 	list_for_each_entry_safe(chan, _chan, &atxdmac->dma.channels, device_node) {
dma              1902 drivers/dma/at_xdmac.c 	list_for_each_entry_safe(chan, _chan, &atxdmac->dma.channels, device_node) {
dma              1934 drivers/dma/at_xdmac.c 	for (i = 0; i < atxdmac->dma.chancnt; i++) {
dma              1941 drivers/dma/at_xdmac.c 	list_for_each_entry_safe(chan, _chan, &atxdmac->dma.channels, device_node) {
dma              2030 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_CYCLIC, atxdmac->dma.cap_mask);
dma              2031 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_INTERLEAVE, atxdmac->dma.cap_mask);
dma              2032 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_MEMCPY, atxdmac->dma.cap_mask);
dma              2033 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_MEMSET, atxdmac->dma.cap_mask);
dma              2034 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_MEMSET_SG, atxdmac->dma.cap_mask);
dma              2035 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_SLAVE, atxdmac->dma.cap_mask);
dma              2040 drivers/dma/at_xdmac.c 	dma_cap_set(DMA_PRIVATE, atxdmac->dma.cap_mask);
dma              2041 drivers/dma/at_xdmac.c 	atxdmac->dma.dev				= &pdev->dev;
dma              2042 drivers/dma/at_xdmac.c 	atxdmac->dma.device_alloc_chan_resources	= at_xdmac_alloc_chan_resources;
dma              2043 drivers/dma/at_xdmac.c 	atxdmac->dma.device_free_chan_resources		= at_xdmac_free_chan_resources;
dma              2044 drivers/dma/at_xdmac.c 	atxdmac->dma.device_tx_status			= at_xdmac_tx_status;
dma              2045 drivers/dma/at_xdmac.c 	atxdmac->dma.device_issue_pending		= at_xdmac_issue_pending;
dma              2046 drivers/dma/at_xdmac.c 	atxdmac->dma.device_prep_dma_cyclic		= at_xdmac_prep_dma_cyclic;
dma              2047 drivers/dma/at_xdmac.c 	atxdmac->dma.device_prep_interleaved_dma	= at_xdmac_prep_interleaved;
dma              2048 drivers/dma/at_xdmac.c 	atxdmac->dma.device_prep_dma_memcpy		= at_xdmac_prep_dma_memcpy;
dma              2049 drivers/dma/at_xdmac.c 	atxdmac->dma.device_prep_dma_memset		= at_xdmac_prep_dma_memset;
dma              2050 drivers/dma/at_xdmac.c 	atxdmac->dma.device_prep_dma_memset_sg		= at_xdmac_prep_dma_memset_sg;
dma              2051 drivers/dma/at_xdmac.c 	atxdmac->dma.device_prep_slave_sg		= at_xdmac_prep_slave_sg;
dma              2052 drivers/dma/at_xdmac.c 	atxdmac->dma.device_config			= at_xdmac_device_config;
dma              2053 drivers/dma/at_xdmac.c 	atxdmac->dma.device_pause			= at_xdmac_device_pause;
dma              2054 drivers/dma/at_xdmac.c 	atxdmac->dma.device_resume			= at_xdmac_device_resume;
dma              2055 drivers/dma/at_xdmac.c 	atxdmac->dma.device_terminate_all		= at_xdmac_device_terminate_all;
dma              2056 drivers/dma/at_xdmac.c 	atxdmac->dma.src_addr_widths = AT_XDMAC_DMA_BUSWIDTHS;
dma              2057 drivers/dma/at_xdmac.c 	atxdmac->dma.dst_addr_widths = AT_XDMAC_DMA_BUSWIDTHS;
dma              2058 drivers/dma/at_xdmac.c 	atxdmac->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
dma              2059 drivers/dma/at_xdmac.c 	atxdmac->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dma              2065 drivers/dma/at_xdmac.c 	INIT_LIST_HEAD(&atxdmac->dma.channels);
dma              2069 drivers/dma/at_xdmac.c 		atchan->chan.device = &atxdmac->dma;
dma              2071 drivers/dma/at_xdmac.c 			      &atxdmac->dma.channels);
dma              2088 drivers/dma/at_xdmac.c 	ret = dma_async_device_register(&atxdmac->dma);
dma              2107 drivers/dma/at_xdmac.c 	dma_async_device_unregister(&atxdmac->dma);
dma              2122 drivers/dma/at_xdmac.c 	dma_async_device_unregister(&atxdmac->dma);
dma              2127 drivers/dma/at_xdmac.c 	for (i = 0; i < atxdmac->dma.chancnt; i++) {
dma               548 drivers/dma/bcm2835-dma.c 		dma_addr_t dma;
dma               551 drivers/dma/bcm2835-dma.c 			dma = control_block->dst;
dma               553 drivers/dma/bcm2835-dma.c 			dma = control_block->src;
dma               557 drivers/dma/bcm2835-dma.c 		else if (addr >= dma && addr < dma + this_size)
dma               558 drivers/dma/bcm2835-dma.c 			size += dma + this_size - addr;
dma              2589 drivers/dma/coh901318.c static void coh901318_base_init(struct dma_device *dma, const int *pick_chans,
dma              2596 drivers/dma/coh901318.c 	INIT_LIST_HEAD(&dma->channels);
dma              2603 drivers/dma/coh901318.c 			cohc->chan.device = dma;
dma              2622 drivers/dma/coh901318.c 				      &dma->channels);
dma               885 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	INIT_LIST_HEAD(&dw->dma.channels);
dma               895 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 		vchan_init(&chan->vc, &dw->dma);
dma               899 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
dma               902 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.chancnt = hdata->nr_channels;
dma               903 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.src_addr_widths = AXI_DMA_BUSWIDTHS;
dma               904 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.dst_addr_widths = AXI_DMA_BUSWIDTHS;
dma               905 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.directions = BIT(DMA_MEM_TO_MEM);
dma               906 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
dma               908 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.dev = chip->dev;
dma               909 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_tx_status = dma_chan_tx_status;
dma               910 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_issue_pending = dma_chan_issue_pending;
dma               911 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_terminate_all = dma_chan_terminate_all;
dma               912 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_pause = dma_chan_pause;
dma               913 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_resume = dma_chan_resume;
dma               915 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_alloc_chan_resources = dma_chan_alloc_chan_resources;
dma               916 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_free_chan_resources = dma_chan_free_chan_resources;
dma               918 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	dw->dma.device_prep_dma_memcpy = dma_chan_prep_dma_memcpy;
dma               938 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	ret = dmaenginem_async_device_register(&dw->dma);
dma               975 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
dma                49 drivers/dma/dw-axi-dmac/dw-axi-dmac.h 	struct dma_device	dma;
dma               632 drivers/dma/dw-edma/dw-edma-core.c 	struct dma_device *dma;
dma               651 drivers/dma/dw-edma/dw-edma-core.c 		dma = &dw->wr_edma;
dma               657 drivers/dma/dw-edma/dw-edma-core.c 		dma = &dw->rd_edma;
dma               662 drivers/dma/dw-edma/dw-edma-core.c 	INIT_LIST_HEAD(&dma->channels);
dma               709 drivers/dma/dw-edma/dw-edma-core.c 		vchan_init(&chan->vc, dma);
dma               722 drivers/dma/dw-edma/dw-edma-core.c 	dma_cap_zero(dma->cap_mask);
dma               723 drivers/dma/dw-edma/dw-edma-core.c 	dma_cap_set(DMA_SLAVE, dma->cap_mask);
dma               724 drivers/dma/dw-edma/dw-edma-core.c 	dma_cap_set(DMA_CYCLIC, dma->cap_mask);
dma               725 drivers/dma/dw-edma/dw-edma-core.c 	dma_cap_set(DMA_PRIVATE, dma->cap_mask);
dma               726 drivers/dma/dw-edma/dw-edma-core.c 	dma->directions = BIT(write ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV);
dma               727 drivers/dma/dw-edma/dw-edma-core.c 	dma->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
dma               728 drivers/dma/dw-edma/dw-edma-core.c 	dma->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
dma               729 drivers/dma/dw-edma/dw-edma-core.c 	dma->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
dma               730 drivers/dma/dw-edma/dw-edma-core.c 	dma->chancnt = cnt;
dma               733 drivers/dma/dw-edma/dw-edma-core.c 	dma->dev = chip->dev;
dma               734 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_alloc_chan_resources = dw_edma_alloc_chan_resources;
dma               735 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_free_chan_resources = dw_edma_free_chan_resources;
dma               736 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_config = dw_edma_device_config;
dma               737 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_pause = dw_edma_device_pause;
dma               738 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_resume = dw_edma_device_resume;
dma               739 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_terminate_all = dw_edma_device_terminate_all;
dma               740 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_issue_pending = dw_edma_device_issue_pending;
dma               741 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_tx_status = dw_edma_device_tx_status;
dma               742 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_prep_slave_sg = dw_edma_device_prep_slave_sg;
dma               743 drivers/dma/dw-edma/dw-edma-core.c 	dma->device_prep_dma_cyclic = dw_edma_device_prep_dma_cyclic;
dma               745 drivers/dma/dw-edma/dw-edma-core.c 	dma_set_max_seg_size(dma->dev, U32_MAX);
dma               748 drivers/dma/dw-edma/dw-edma-core.c 	err = dma_async_device_register(dma);
dma                25 drivers/dma/dw/acpi.c 	struct device *dev = dw->dma.dev;
dma                47 drivers/dma/dw/acpi.c 	struct device *dev = dw->dma.dev;
dma               482 drivers/dma/dw/core.c 	dev_vdbg(dw->dma.dev, "%s: status_err=%x\n", __func__, status_err);
dma               484 drivers/dma/dw/core.c 	for (i = 0; i < dw->dma.chancnt; i++) {
dma               487 drivers/dma/dw/core.c 			dev_vdbg(dw->dma.dev, "Cyclic xfer is not implemented\n");
dma               509 drivers/dma/dw/core.c 	dev_vdbg(dw->dma.dev, "%s: status=0x%x\n", __func__, status);
dma               525 drivers/dma/dw/core.c 		dev_err(dw->dma.dev,
dma               970 drivers/dma/dw/core.c 	for (i = 0; i < dw->dma.chancnt; i++)
dma              1136 drivers/dma/dw/core.c 	INIT_LIST_HEAD(&dw->dma.channels);
dma              1140 drivers/dma/dw/core.c 		dwc->chan.device = &dw->dma;
dma              1144 drivers/dma/dw/core.c 					&dw->dma.channels);
dma              1146 drivers/dma/dw/core.c 			list_add(&dwc->chan.device_node, &dw->dma.channels);
dma              1197 drivers/dma/dw/core.c 	dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
dma              1198 drivers/dma/dw/core.c 	dma_cap_set(DMA_PRIVATE, dw->dma.cap_mask);
dma              1199 drivers/dma/dw/core.c 	dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
dma              1201 drivers/dma/dw/core.c 	dw->dma.dev = chip->dev;
dma              1202 drivers/dma/dw/core.c 	dw->dma.device_alloc_chan_resources = dwc_alloc_chan_resources;
dma              1203 drivers/dma/dw/core.c 	dw->dma.device_free_chan_resources = dwc_free_chan_resources;
dma              1205 drivers/dma/dw/core.c 	dw->dma.device_prep_dma_memcpy = dwc_prep_dma_memcpy;
dma              1206 drivers/dma/dw/core.c 	dw->dma.device_prep_slave_sg = dwc_prep_slave_sg;
dma              1208 drivers/dma/dw/core.c 	dw->dma.device_config = dwc_config;
dma              1209 drivers/dma/dw/core.c 	dw->dma.device_pause = dwc_pause;
dma              1210 drivers/dma/dw/core.c 	dw->dma.device_resume = dwc_resume;
dma              1211 drivers/dma/dw/core.c 	dw->dma.device_terminate_all = dwc_terminate_all;
dma              1213 drivers/dma/dw/core.c 	dw->dma.device_tx_status = dwc_tx_status;
dma              1214 drivers/dma/dw/core.c 	dw->dma.device_issue_pending = dwc_issue_pending;
dma              1217 drivers/dma/dw/core.c 	dw->dma.src_addr_widths = DW_DMA_BUSWIDTHS;
dma              1218 drivers/dma/dw/core.c 	dw->dma.dst_addr_widths = DW_DMA_BUSWIDTHS;
dma              1219 drivers/dma/dw/core.c 	dw->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
dma              1221 drivers/dma/dw/core.c 	dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dma              1223 drivers/dma/dw/core.c 	err = dma_async_device_register(&dw->dma);
dma              1249 drivers/dma/dw/core.c 	dma_async_device_unregister(&dw->dma);
dma              1254 drivers/dma/dw/core.c 	list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels,
dma                21 drivers/dma/dw/of.c 		.dma_dev = dw->dma.dev,
dma               112 drivers/dma/dw/of.c 	struct device *dev = dw->dma.dev;
dma               125 drivers/dma/dw/of.c 	struct device *dev = dw->dma.dev;
dma               311 drivers/dma/dw/regs.h 	struct dma_device	dma;
dma               363 drivers/dma/dw/regs.h 	return container_of(ddev, struct dw_dma, dma);
dma               437 drivers/dma/hsu/hsu.c 	INIT_LIST_HEAD(&hsu->dma.channels);
dma               442 drivers/dma/hsu/hsu.c 		vchan_init(&hsuc->vchan, &hsu->dma);
dma               448 drivers/dma/hsu/hsu.c 	dma_cap_set(DMA_SLAVE, hsu->dma.cap_mask);
dma               449 drivers/dma/hsu/hsu.c 	dma_cap_set(DMA_PRIVATE, hsu->dma.cap_mask);
dma               451 drivers/dma/hsu/hsu.c 	hsu->dma.device_free_chan_resources = hsu_dma_free_chan_resources;
dma               453 drivers/dma/hsu/hsu.c 	hsu->dma.device_prep_slave_sg = hsu_dma_prep_slave_sg;
dma               455 drivers/dma/hsu/hsu.c 	hsu->dma.device_issue_pending = hsu_dma_issue_pending;
dma               456 drivers/dma/hsu/hsu.c 	hsu->dma.device_tx_status = hsu_dma_tx_status;
dma               458 drivers/dma/hsu/hsu.c 	hsu->dma.device_config = hsu_dma_slave_config;
dma               459 drivers/dma/hsu/hsu.c 	hsu->dma.device_pause = hsu_dma_pause;
dma               460 drivers/dma/hsu/hsu.c 	hsu->dma.device_resume = hsu_dma_resume;
dma               461 drivers/dma/hsu/hsu.c 	hsu->dma.device_terminate_all = hsu_dma_terminate_all;
dma               462 drivers/dma/hsu/hsu.c 	hsu->dma.device_synchronize = hsu_dma_synchronize;
dma               464 drivers/dma/hsu/hsu.c 	hsu->dma.src_addr_widths = HSU_DMA_BUSWIDTHS;
dma               465 drivers/dma/hsu/hsu.c 	hsu->dma.dst_addr_widths = HSU_DMA_BUSWIDTHS;
dma               466 drivers/dma/hsu/hsu.c 	hsu->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
dma               467 drivers/dma/hsu/hsu.c 	hsu->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dma               469 drivers/dma/hsu/hsu.c 	hsu->dma.dev = chip->dev;
dma               471 drivers/dma/hsu/hsu.c 	dma_set_max_seg_size(hsu->dma.dev, HSU_CH_DxTSR_MASK);
dma               473 drivers/dma/hsu/hsu.c 	ret = dma_async_device_register(&hsu->dma);
dma               487 drivers/dma/hsu/hsu.c 	dma_async_device_unregister(&hsu->dma);
dma               111 drivers/dma/hsu/hsu.h 	struct dma_device		dma;
dma               120 drivers/dma/hsu/hsu.h 	return container_of(ddev, struct hsu_dma, dma);
dma               170 drivers/dma/idma64.c 	dev_vdbg(idma64->dma.dev, "%s: status=%#x\n", __func__, status);
dma               179 drivers/dma/idma64.c 	for (i = 0; i < idma64->dma.chancnt; i++)
dma               558 drivers/dma/idma64.c 	INIT_LIST_HEAD(&idma64->dma.channels);
dma               563 drivers/dma/idma64.c 		vchan_init(&idma64c->vchan, &idma64->dma);
dma               569 drivers/dma/idma64.c 	dma_cap_set(DMA_SLAVE, idma64->dma.cap_mask);
dma               570 drivers/dma/idma64.c 	dma_cap_set(DMA_PRIVATE, idma64->dma.cap_mask);
dma               572 drivers/dma/idma64.c 	idma64->dma.device_alloc_chan_resources = idma64_alloc_chan_resources;
dma               573 drivers/dma/idma64.c 	idma64->dma.device_free_chan_resources = idma64_free_chan_resources;
dma               575 drivers/dma/idma64.c 	idma64->dma.device_prep_slave_sg = idma64_prep_slave_sg;
dma               577 drivers/dma/idma64.c 	idma64->dma.device_issue_pending = idma64_issue_pending;
dma               578 drivers/dma/idma64.c 	idma64->dma.device_tx_status = idma64_tx_status;
dma               580 drivers/dma/idma64.c 	idma64->dma.device_config = idma64_slave_config;
dma               581 drivers/dma/idma64.c 	idma64->dma.device_pause = idma64_pause;
dma               582 drivers/dma/idma64.c 	idma64->dma.device_resume = idma64_resume;
dma               583 drivers/dma/idma64.c 	idma64->dma.device_terminate_all = idma64_terminate_all;
dma               584 drivers/dma/idma64.c 	idma64->dma.device_synchronize = idma64_synchronize;
dma               586 drivers/dma/idma64.c 	idma64->dma.src_addr_widths = IDMA64_BUSWIDTHS;
dma               587 drivers/dma/idma64.c 	idma64->dma.dst_addr_widths = IDMA64_BUSWIDTHS;
dma               588 drivers/dma/idma64.c 	idma64->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
dma               589 drivers/dma/idma64.c 	idma64->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dma               591 drivers/dma/idma64.c 	idma64->dma.dev = chip->sysdev;
dma               593 drivers/dma/idma64.c 	dma_set_max_seg_size(idma64->dma.dev, IDMA64C_CTLH_BLOCK_TS_MASK);
dma               595 drivers/dma/idma64.c 	ret = dma_async_device_register(&idma64->dma);
dma               608 drivers/dma/idma64.c 	dma_async_device_unregister(&idma64->dma);
dma               616 drivers/dma/idma64.c 	for (i = 0; i < idma64->dma.chancnt; i++) {
dma               184 drivers/dma/idma64.h 	struct dma_device dma;
dma               195 drivers/dma/idma64.h 	return container_of(ddev, struct idma64, dma);
dma               511 drivers/dma/ioat/dma.c 	dma_pool_free(ioat_dma->sed_hw_pool[sed->hw_pool], sed->hw, sed->dma);
dma               152 drivers/dma/ioat/dma.h 	dma_addr_t dma;
dma               305 drivers/dma/ioat/init.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               330 drivers/dma/ioat/init.c 	dma_chan = container_of(dma->channels.next, struct dma_chan,
dma               332 drivers/dma/ioat/init.c 	if (dma->device_alloc_chan_resources(dma_chan) < 1) {
dma               370 drivers/dma/ioat/init.c 	dma->device_issue_pending(dma_chan);
dma               375 drivers/dma/ioat/init.c 	    dma->device_tx_status(dma_chan, cookie, NULL)
dma               392 drivers/dma/ioat/init.c 	dma->device_free_chan_resources(dma_chan);
dma               496 drivers/dma/ioat/init.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               512 drivers/dma/ioat/init.c 	dma_cap_set(DMA_MEMCPY, dma->cap_mask);
dma               513 drivers/dma/ioat/init.c 	dma->dev = &pdev->dev;
dma               515 drivers/dma/ioat/init.c 	if (!dma->chancnt) {
dma               552 drivers/dma/ioat/init.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               558 drivers/dma/ioat/init.c 	dma_async_device_unregister(dma);
dma               562 drivers/dma/ioat/init.c 	INIT_LIST_HEAD(&dma->channels);
dma               573 drivers/dma/ioat/init.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               577 drivers/dma/ioat/init.c 	INIT_LIST_HEAD(&dma->channels);
dma               578 drivers/dma/ioat/init.c 	dma->chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET);
dma               579 drivers/dma/ioat/init.c 	dma->chancnt &= 0x1f; /* bits [4:0] valid */
dma               580 drivers/dma/ioat/init.c 	if (dma->chancnt > ARRAY_SIZE(ioat_dma->idx)) {
dma               582 drivers/dma/ioat/init.c 			 dma->chancnt, ARRAY_SIZE(ioat_dma->idx));
dma               583 drivers/dma/ioat/init.c 		dma->chancnt = ARRAY_SIZE(ioat_dma->idx);
dma               591 drivers/dma/ioat/init.c 	for (i = 0; i < dma->chancnt; i++) {
dma               604 drivers/dma/ioat/init.c 	dma->chancnt = i;
dma               769 drivers/dma/ioat/init.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               776 drivers/dma/ioat/init.c 	ioat_chan->dma_chan.device = dma;
dma               778 drivers/dma/ioat/init.c 	list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels);
dma               803 drivers/dma/ioat/init.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               808 drivers/dma/ioat/init.c 	if (!dma_has_cap(DMA_XOR, dma->cap_mask))
dma               843 drivers/dma/ioat/init.c 	dma_chan = container_of(dma->channels.next, struct dma_chan,
dma               845 drivers/dma/ioat/init.c 	if (dma->device_alloc_chan_resources(dma_chan) < 1) {
dma               867 drivers/dma/ioat/init.c 	tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
dma               887 drivers/dma/ioat/init.c 	dma->device_issue_pending(dma_chan);
dma               892 drivers/dma/ioat/init.c 	    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
dma               936 drivers/dma/ioat/init.c 	tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
dma               955 drivers/dma/ioat/init.c 	dma->device_issue_pending(dma_chan);
dma               960 drivers/dma/ioat/init.c 	    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
dma               989 drivers/dma/ioat/init.c 	tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
dma              1008 drivers/dma/ioat/init.c 	dma->device_issue_pending(dma_chan);
dma              1013 drivers/dma/ioat/init.c 	    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
dma              1041 drivers/dma/ioat/init.c 	dma->device_free_chan_resources(dma_chan);
dma              1065 drivers/dma/ioat/init.c 	struct dma_device *dma;
dma              1070 drivers/dma/ioat/init.c 	dma = &ioat_dma->dma_dev;
dma              1077 drivers/dma/ioat/init.c 		list_for_each_entry(c, &dma->channels, device_node) {
dma              1093 drivers/dma/ioat/init.c 	struct dma_device *dma;
dma              1099 drivers/dma/ioat/init.c 	dma = &ioat_dma->dma_dev;
dma              1100 drivers/dma/ioat/init.c 	dma->device_prep_dma_memcpy = ioat_dma_prep_memcpy_lock;
dma              1101 drivers/dma/ioat/init.c 	dma->device_issue_pending = ioat_issue_pending;
dma              1102 drivers/dma/ioat/init.c 	dma->device_alloc_chan_resources = ioat_alloc_chan_resources;
dma              1103 drivers/dma/ioat/init.c 	dma->device_free_chan_resources = ioat_free_chan_resources;
dma              1105 drivers/dma/ioat/init.c 	dma_cap_set(DMA_INTERRUPT, dma->cap_mask);
dma              1106 drivers/dma/ioat/init.c 	dma->device_prep_dma_interrupt = ioat_prep_interrupt_lock;
dma              1119 drivers/dma/ioat/init.c 		dma->max_xor = 8;
dma              1121 drivers/dma/ioat/init.c 		dma_cap_set(DMA_XOR, dma->cap_mask);
dma              1122 drivers/dma/ioat/init.c 		dma->device_prep_dma_xor = ioat_prep_xor;
dma              1124 drivers/dma/ioat/init.c 		dma_cap_set(DMA_XOR_VAL, dma->cap_mask);
dma              1125 drivers/dma/ioat/init.c 		dma->device_prep_dma_xor_val = ioat_prep_xor_val;
dma              1130 drivers/dma/ioat/init.c 		dma->device_prep_dma_pq = ioat_prep_pq;
dma              1131 drivers/dma/ioat/init.c 		dma->device_prep_dma_pq_val = ioat_prep_pq_val;
dma              1132 drivers/dma/ioat/init.c 		dma_cap_set(DMA_PQ, dma->cap_mask);
dma              1133 drivers/dma/ioat/init.c 		dma_cap_set(DMA_PQ_VAL, dma->cap_mask);
dma              1136 drivers/dma/ioat/init.c 			dma_set_maxpq(dma, 16, 0);
dma              1138 drivers/dma/ioat/init.c 			dma_set_maxpq(dma, 8, 0);
dma              1141 drivers/dma/ioat/init.c 			dma->device_prep_dma_xor = ioat_prep_pqxor;
dma              1142 drivers/dma/ioat/init.c 			dma->device_prep_dma_xor_val = ioat_prep_pqxor_val;
dma              1143 drivers/dma/ioat/init.c 			dma_cap_set(DMA_XOR, dma->cap_mask);
dma              1144 drivers/dma/ioat/init.c 			dma_cap_set(DMA_XOR_VAL, dma->cap_mask);
dma              1147 drivers/dma/ioat/init.c 				dma->max_xor = 16;
dma              1149 drivers/dma/ioat/init.c 				dma->max_xor = 8;
dma              1153 drivers/dma/ioat/init.c 	dma->device_tx_status = ioat_tx_status;
dma              1174 drivers/dma/ioat/init.c 		dma_cap_set(DMA_PRIVATE, dma->cap_mask);
dma              1180 drivers/dma/ioat/init.c 	list_for_each_entry(c, &dma->channels, device_node) {
dma                91 drivers/dma/ioat/prep.c 				 flags, &sed->dma);
dma               512 drivers/dma/ioat/prep.c 		pq->sed_addr = desc->sed->dma;
dma                19 drivers/dma/ioat/sysfs.c 	struct dma_device *dma = c->device;
dma                22 drivers/dma/ioat/sysfs.c 		       dma_has_cap(DMA_PQ, dma->cap_mask) ? " pq" : "",
dma                23 drivers/dma/ioat/sysfs.c 		       dma_has_cap(DMA_PQ_VAL, dma->cap_mask) ? " pq_val" : "",
dma                24 drivers/dma/ioat/sysfs.c 		       dma_has_cap(DMA_XOR, dma->cap_mask) ? " xor" : "",
dma                25 drivers/dma/ioat/sysfs.c 		       dma_has_cap(DMA_XOR_VAL, dma->cap_mask) ? " xor_val" : "",
dma                26 drivers/dma/ioat/sysfs.c 		       dma_has_cap(DMA_INTERRUPT, dma->cap_mask) ? " intr" : "");
dma                33 drivers/dma/ioat/sysfs.c 	struct dma_device *dma = c->device;
dma                34 drivers/dma/ioat/sysfs.c 	struct ioatdma_device *ioat_dma = to_ioatdma_device(dma);
dma                77 drivers/dma/ioat/sysfs.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma                80 drivers/dma/ioat/sysfs.c 	list_for_each_entry(c, &dma->channels, device_node) {
dma                98 drivers/dma/ioat/sysfs.c 	struct dma_device *dma = &ioat_dma->dma_dev;
dma               101 drivers/dma/ioat/sysfs.c 	list_for_each_entry(c, &dma->channels, device_node) {
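
The ioat self-test lines above exercise the same flow a dmaengine client goes through: obtain a channel, prepare a descriptor, submit it, kick the engine with issue_pending, and wait for DMA_COMPLETE. A hedged sketch of that flow using the public client helpers; dst and src are assumed to be already-mapped bus addresses, and error handling is kept minimal.

#include <linux/dmaengine.h>
#include <linux/err.h>

static int foo_memcpy_once(dma_addr_t dst, dma_addr_t src, size_t len)
{
	dma_cap_mask_t mask;
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;
	int ret = 0;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);

	/* Grab any channel advertising memcpy capability. */
	chan = dma_request_chan_by_mask(&mask);
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (!tx) {
		ret = -ENOMEM;
		goto out;
	}

	cookie = dmaengine_submit(tx);
	dma_async_issue_pending(chan);

	/* Busy-wait for completion, as the driver self-tests above do. */
	if (dma_sync_wait(chan, cookie) != DMA_COMPLETE)
		ret = -ETIMEDOUT;
out:
	dma_release_channel(chan);
	return ret;
}
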
dma               166 drivers/dma/iop-adma.h 	struct iop3xx_desc_dma *dma;
dma               386 drivers/dma/iop-adma.h 		return hw_desc.dma->byte_count;
dma               420 drivers/dma/iop-adma.h 		return hw_desc.dma->src_addr;
dma               637 drivers/dma/iop-adma.h 		hw_desc.dma->byte_count = byte_count;
dma               657 drivers/dma/iop-adma.h 		hw_desc.dma->byte_count = 0;
dma               658 drivers/dma/iop-adma.h 		hw_desc.dma->dest_addr = 0;
dma               659 drivers/dma/iop-adma.h 		hw_desc.dma->src_addr = 0;
dma               704 drivers/dma/iop-adma.h 		hw_desc.dma->dest_addr = addr;
dma               758 drivers/dma/iop-adma.h 	iop_paranoia(hw_desc.dma->next_desc);
dma               759 drivers/dma/iop-adma.h 	hw_desc.dma->next_desc = next_desc_addr;
dma               766 drivers/dma/iop-adma.h 	return hw_desc.dma->next_desc;
dma               773 drivers/dma/iop-adma.h 	hw_desc.dma->next_desc = 0;
dma              1611 drivers/dma/ipu/ipu_idmac.c 	struct dma_device *dma = &idmac->dma;
dma              1614 drivers/dma/ipu/ipu_idmac.c 	dma_cap_set(DMA_SLAVE, dma->cap_mask);
dma              1615 drivers/dma/ipu/ipu_idmac.c 	dma_cap_set(DMA_PRIVATE, dma->cap_mask);
dma              1618 drivers/dma/ipu/ipu_idmac.c 	dma->dev				= ipu->dev;
dma              1619 drivers/dma/ipu/ipu_idmac.c 	dma->device_alloc_chan_resources	= idmac_alloc_chan_resources;
dma              1620 drivers/dma/ipu/ipu_idmac.c 	dma->device_free_chan_resources		= idmac_free_chan_resources;
dma              1621 drivers/dma/ipu/ipu_idmac.c 	dma->device_tx_status			= idmac_tx_status;
dma              1622 drivers/dma/ipu/ipu_idmac.c 	dma->device_issue_pending		= idmac_issue_pending;
dma              1625 drivers/dma/ipu/ipu_idmac.c 	dma->device_prep_slave_sg		= idmac_prep_slave_sg;
dma              1626 drivers/dma/ipu/ipu_idmac.c 	dma->device_pause			= idmac_pause;
dma              1627 drivers/dma/ipu/ipu_idmac.c 	dma->device_terminate_all		= idmac_terminate_all;
dma              1629 drivers/dma/ipu/ipu_idmac.c 	INIT_LIST_HEAD(&dma->channels);
dma              1641 drivers/dma/ipu/ipu_idmac.c 		dma_chan->device	= &idmac->dma;
dma              1644 drivers/dma/ipu/ipu_idmac.c 		list_add_tail(&dma_chan->device_node, &dma->channels);
dma              1649 drivers/dma/ipu/ipu_idmac.c 	return dma_async_device_register(&idmac->dma);
dma              1663 drivers/dma/ipu/ipu_idmac.c 	dma_async_device_unregister(&idmac->dma);
dma               146 drivers/dma/ipu/ipu_intern.h 	struct dma_device	dma;
dma               164 drivers/dma/ipu/ipu_intern.h #define to_idmac(d) container_of(d, struct idmac, dma)
dma               508 drivers/dma/moxart-dma.c static void moxart_dma_init(struct dma_device *dma, struct device *dev)
dma               510 drivers/dma/moxart-dma.c 	dma->device_prep_slave_sg		= moxart_prep_slave_sg;
dma               511 drivers/dma/moxart-dma.c 	dma->device_alloc_chan_resources	= moxart_alloc_chan_resources;
dma               512 drivers/dma/moxart-dma.c 	dma->device_free_chan_resources		= moxart_free_chan_resources;
dma               513 drivers/dma/moxart-dma.c 	dma->device_issue_pending		= moxart_issue_pending;
dma               514 drivers/dma/moxart-dma.c 	dma->device_tx_status			= moxart_tx_status;
dma               515 drivers/dma/moxart-dma.c 	dma->device_config			= moxart_slave_config;
dma               516 drivers/dma/moxart-dma.c 	dma->device_terminate_all		= moxart_terminate_all;
dma               517 drivers/dma/moxart-dma.c 	dma->dev				= dev;
dma               519 drivers/dma/moxart-dma.c 	INIT_LIST_HEAD(&dma->channels);
dma               216 drivers/dma/mpc512x_dma.c 	struct dma_device		dma;
dma               362 drivers/dma/mpc512x_dma.c 	if (mdma->dma.chancnt > 32) {
dma               386 drivers/dma/mpc512x_dma.c 	for (i = 0; i < mdma->dma.chancnt; i++) {
dma               430 drivers/dma/mpc512x_dma.c 		dev_err(mdma->dma.dev,
dma               435 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Group Priority Error\n");
dma               437 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Channel Priority Error\n");
dma               439 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Source Address Error\n");
dma               441 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Source Offset Configuration Error\n");
dma               443 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Destination Address Error\n");
dma               445 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Destination Offset Configuration Error\n");
dma               447 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- NBytes/Citter Configuration Error\n");
dma               449 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Scatter/Gather Configuration Error\n");
dma               451 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Source Bus Error\n");
dma               453 drivers/dma/mpc512x_dma.c 			dev_err(mdma->dma.dev, "- Destination Bus Error\n");
dma               498 drivers/dma/mpc512x_dma.c 	tcd = dma_alloc_coherent(mdma->dma.dev,
dma               508 drivers/dma/mpc512x_dma.c 			dev_notice(mdma->dma.dev,
dma               525 drivers/dma/mpc512x_dma.c 		dma_free_coherent(mdma->dma.dev,
dma               570 drivers/dma/mpc512x_dma.c 	dma_free_coherent(mdma->dma.dev,
dma               901 drivers/dma/mpc512x_dma.c 	struct dma_device *dma;
dma               976 drivers/dma/mpc512x_dma.c 	dma = &mdma->dma;
dma               977 drivers/dma/mpc512x_dma.c 	dma->dev = dev;
dma               978 drivers/dma/mpc512x_dma.c 	dma->device_alloc_chan_resources = mpc_dma_alloc_chan_resources;
dma               979 drivers/dma/mpc512x_dma.c 	dma->device_free_chan_resources = mpc_dma_free_chan_resources;
dma               980 drivers/dma/mpc512x_dma.c 	dma->device_issue_pending = mpc_dma_issue_pending;
dma               981 drivers/dma/mpc512x_dma.c 	dma->device_tx_status = mpc_dma_tx_status;
dma               982 drivers/dma/mpc512x_dma.c 	dma->device_prep_dma_memcpy = mpc_dma_prep_memcpy;
dma               983 drivers/dma/mpc512x_dma.c 	dma->device_prep_slave_sg = mpc_dma_prep_slave_sg;
dma               984 drivers/dma/mpc512x_dma.c 	dma->device_config = mpc_dma_device_config;
dma               985 drivers/dma/mpc512x_dma.c 	dma->device_terminate_all = mpc_dma_device_terminate_all;
dma               987 drivers/dma/mpc512x_dma.c 	INIT_LIST_HEAD(&dma->channels);
dma               988 drivers/dma/mpc512x_dma.c 	dma_cap_set(DMA_MEMCPY, dma->cap_mask);
dma               989 drivers/dma/mpc512x_dma.c 	dma_cap_set(DMA_SLAVE, dma->cap_mask);
dma               999 drivers/dma/mpc512x_dma.c 		mchan->chan.device = dma;
dma              1009 drivers/dma/mpc512x_dma.c 		list_add_tail(&mchan->chan.device_node, &dma->channels);
dma              1058 drivers/dma/mpc512x_dma.c 	retval = dma_async_device_register(dma);
dma              1093 drivers/dma/mpc512x_dma.c 	dma_async_device_unregister(&mdma->dma);
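
The mpc512x per-channel loop above (chan.device = dma, then list_add_tail onto dma->channels) is the same hand-rolled channel registration used by pch_dma, timb_dma and txx9dmac further down. A minimal sketch, with struct foo_chan as a hypothetical per-channel wrapper.

#include <linux/dmaengine.h>
#include <linux/list.h>

struct foo_chan {
	struct dma_chan chan;
	/* driver-private per-channel state would follow */
};

static void foo_add_channels(struct dma_device *dma, struct foo_chan *fc,
			     unsigned int nr)
{
	unsigned int i;

	for (i = 0; i < nr; i++) {
		fc[i].chan.device = dma;
		list_add_tail(&fc[i].chan.device_node, &dma->channels);
	}
}
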
dma               215 drivers/dma/owl-dma.c 	struct dma_device	dma;
dma               280 drivers/dma/owl-dma.c 	return container_of(dd, struct owl_dma, dma);
dma               474 drivers/dma/owl-dma.c 		dev_warn(od->dma.dev,
dma               586 drivers/dma/owl-dma.c 			dev_dbg(od->dma.dev,
dma               607 drivers/dma/owl-dma.c 			dev_warn(od->dma.dev, "no vchan attached on pchan %d\n",
dma               806 drivers/dma/owl-dma.c 	dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id);
dma               902 drivers/dma/owl-dma.c 			dev_err(od->dma.dev,
dma              1015 drivers/dma/owl-dma.c 				 next, &od->dma.channels, vc.chan.device_node) {
dma              1032 drivers/dma/owl-dma.c 	chan = dma_get_any_slave_channel(&od->dma);
dma              1084 drivers/dma/owl-dma.c 	dma_cap_set(DMA_MEMCPY, od->dma.cap_mask);
dma              1085 drivers/dma/owl-dma.c 	dma_cap_set(DMA_SLAVE, od->dma.cap_mask);
dma              1086 drivers/dma/owl-dma.c 	dma_cap_set(DMA_CYCLIC, od->dma.cap_mask);
dma              1088 drivers/dma/owl-dma.c 	od->dma.dev = &pdev->dev;
dma              1089 drivers/dma/owl-dma.c 	od->dma.device_free_chan_resources = owl_dma_free_chan_resources;
dma              1090 drivers/dma/owl-dma.c 	od->dma.device_tx_status = owl_dma_tx_status;
dma              1091 drivers/dma/owl-dma.c 	od->dma.device_issue_pending = owl_dma_issue_pending;
dma              1092 drivers/dma/owl-dma.c 	od->dma.device_prep_dma_memcpy = owl_dma_prep_memcpy;
dma              1093 drivers/dma/owl-dma.c 	od->dma.device_prep_slave_sg = owl_dma_prep_slave_sg;
dma              1094 drivers/dma/owl-dma.c 	od->dma.device_prep_dma_cyclic = owl_prep_dma_cyclic;
dma              1095 drivers/dma/owl-dma.c 	od->dma.device_config = owl_dma_config;
dma              1096 drivers/dma/owl-dma.c 	od->dma.device_pause = owl_dma_pause;
dma              1097 drivers/dma/owl-dma.c 	od->dma.device_resume = owl_dma_resume;
dma              1098 drivers/dma/owl-dma.c 	od->dma.device_terminate_all = owl_dma_terminate_all;
dma              1099 drivers/dma/owl-dma.c 	od->dma.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
dma              1100 drivers/dma/owl-dma.c 	od->dma.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
dma              1101 drivers/dma/owl-dma.c 	od->dma.directions = BIT(DMA_MEM_TO_MEM);
dma              1102 drivers/dma/owl-dma.c 	od->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dma              1104 drivers/dma/owl-dma.c 	INIT_LIST_HEAD(&od->dma.channels);
dma              1148 drivers/dma/owl-dma.c 		vchan_init(&vchan->vc, &od->dma);
dma              1152 drivers/dma/owl-dma.c 	od->lli_pool = dma_pool_create(dev_name(od->dma.dev), od->dma.dev,
dma              1163 drivers/dma/owl-dma.c 	ret = dma_async_device_register(&od->dma);
dma              1180 drivers/dma/owl-dma.c 	dma_async_device_unregister(&od->dma);
dma              1193 drivers/dma/owl-dma.c 	dma_async_device_unregister(&od->dma);
dma              1199 drivers/dma/owl-dma.c 	devm_free_irq(od->dma.dev, od->irq, od);
dma               116 drivers/dma/pch_dma.c 	struct dma_device	dma;
dma               150 drivers/dma/pch_dma.c 	return container_of(ddev, struct pch_dma, dma);
dma               705 drivers/dma/pch_dma.c 	dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0);
dma               707 drivers/dma/pch_dma.c 	for (i = 0; i < pd->dma.chancnt; i++) {
dma               750 drivers/dma/pch_dma.c 	list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
dma               773 drivers/dma/pch_dma.c 	list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
dma               868 drivers/dma/pch_dma.c 	pd->dma.dev = &pdev->dev;
dma               885 drivers/dma/pch_dma.c 	INIT_LIST_HEAD(&pd->dma.channels);
dma               890 drivers/dma/pch_dma.c 		pd_chan->chan.device = &pd->dma;
dma               903 drivers/dma/pch_dma.c 		list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels);
dma               906 drivers/dma/pch_dma.c 	dma_cap_zero(pd->dma.cap_mask);
dma               907 drivers/dma/pch_dma.c 	dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask);
dma               908 drivers/dma/pch_dma.c 	dma_cap_set(DMA_SLAVE, pd->dma.cap_mask);
dma               910 drivers/dma/pch_dma.c 	pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources;
dma               911 drivers/dma/pch_dma.c 	pd->dma.device_free_chan_resources = pd_free_chan_resources;
dma               912 drivers/dma/pch_dma.c 	pd->dma.device_tx_status = pd_tx_status;
dma               913 drivers/dma/pch_dma.c 	pd->dma.device_issue_pending = pd_issue_pending;
dma               914 drivers/dma/pch_dma.c 	pd->dma.device_prep_slave_sg = pd_prep_slave_sg;
dma               915 drivers/dma/pch_dma.c 	pd->dma.device_terminate_all = pd_device_terminate_all;
dma               917 drivers/dma/pch_dma.c 	err = dma_async_device_register(&pd->dma);
dma               947 drivers/dma/pch_dma.c 		dma_async_device_unregister(&pd->dma);
dma               951 drivers/dma/pch_dma.c 		list_for_each_entry_safe(chan, _c, &pd->dma.channels,
dma               521 drivers/dma/pxa_dma.c 	dma_addr_t dma = sw_desc->hw_desc[sw_desc->nb_desc - 2]->ddadr;
dma               524 drivers/dma/pxa_dma.c 	updater->dsadr = dma;
dma               525 drivers/dma/pxa_dma.c 	updater->dtadr = dma + 8;
dma               723 drivers/dma/pxa_dma.c 	dma_addr_t dma;
dma               729 drivers/dma/pxa_dma.c 			dma = sw_desc->hw_desc[i - 1]->ddadr;
dma               731 drivers/dma/pxa_dma.c 			dma = sw_desc->first;
dma               733 drivers/dma/pxa_dma.c 			      sw_desc->hw_desc[i], dma);
dma               743 drivers/dma/pxa_dma.c 	dma_addr_t dma;
dma               755 drivers/dma/pxa_dma.c 						     GFP_NOWAIT, &dma);
dma               764 drivers/dma/pxa_dma.c 			sw_desc->first = dma;
dma               766 drivers/dma/pxa_dma.c 			sw_desc->hw_desc[i - 1]->ddadr = dma;
dma               979 drivers/dma/pxa_dma.c 	dma_addr_t dma;
dma               997 drivers/dma/pxa_dma.c 		dma = sg_dma_address(sg);
dma              1003 drivers/dma/pxa_dma.c 			if (dma & 0x7)
dma              1008 drivers/dma/pxa_dma.c 			sw_desc->hw_desc[j]->dsadr = dsadr ? dsadr : dma;
dma              1009 drivers/dma/pxa_dma.c 			sw_desc->hw_desc[j++]->dtadr = dtadr ? dtadr : dma;
dma              1011 drivers/dma/pxa_dma.c 			dma += len;
dma              1028 drivers/dma/pxa_dma.c 	dma_addr_t dma;
dma              1059 drivers/dma/pxa_dma.c 	dma = buf_addr;
dma              1061 drivers/dma/pxa_dma.c 		phw_desc[0]->dsadr = dsadr ? dsadr : dma;
dma              1062 drivers/dma/pxa_dma.c 		phw_desc[0]->dtadr = dtadr ? dtadr : dma;
dma              1065 drivers/dma/pxa_dma.c 		dma += period_len;
dma                86 drivers/dma/sh/rcar-dmac.c 		dma_addr_t dma;
dma               356 drivers/dma/sh/rcar-dmac.c 			chan->index, desc, desc->nchunks, &desc->hwdescs.dma);
dma               364 drivers/dma/sh/rcar-dmac.c 				     desc->hwdescs.dma >> 32);
dma               367 drivers/dma/sh/rcar-dmac.c 				     (desc->hwdescs.dma & 0xfffffff0) |
dma               705 drivers/dma/sh/rcar-dmac.c 				  desc->hwdescs.mem, desc->hwdescs.dma);
dma               714 drivers/dma/sh/rcar-dmac.c 					       &desc->hwdescs.dma, GFP_NOWAIT);
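
The rcar-dmac lines above allocate hardware descriptor memory with dma_alloc_coherent() and keep both the CPU pointer and the bus address (hwdescs.mem / hwdescs.dma) so the matching dma_free_coherent() can be issued later. A small sketch of that pairing, with illustrative names.

#include <linux/dma-mapping.h>

struct foo_hwdescs {
	void *mem;		/* CPU-visible mapping */
	dma_addr_t dma;		/* bus address programmed into the controller */
	size_t size;
};

static int foo_alloc_hwdescs(struct device *dev, struct foo_hwdescs *h, size_t size)
{
	h->mem = dma_alloc_coherent(dev, size, &h->dma, GFP_NOWAIT);
	if (!h->mem)
		return -ENOMEM;

	h->size = size;
	return 0;
}

static void foo_free_hwdescs(struct device *dev, struct foo_hwdescs *h)
{
	dma_free_coherent(dev, h->size, h->mem, h->dma);
	h->mem = NULL;
}
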
dma               119 drivers/dma/sirf-dma.c 	struct dma_device		dma;
dma               349 drivers/dma/sirf-dma.c 	for (i = 0; i < sdma->dma.chancnt; i++) {
dma               565 drivers/dma/sirf-dma.c 	pm_runtime_get_sync(sdma->dma.dev);
dma               571 drivers/dma/sirf-dma.c 			dev_notice(sdma->dma.dev, "Memory allocation error. "
dma               621 drivers/dma/sirf-dma.c 	pm_runtime_put(sdma->dma.dev);
dma               843 drivers/dma/sirf-dma.c 	struct dma_device *dma;
dma               902 drivers/dma/sirf-dma.c 	dma = &sdma->dma;
dma               903 drivers/dma/sirf-dma.c 	dma->dev = dev;
dma               905 drivers/dma/sirf-dma.c 	dma->device_alloc_chan_resources = sirfsoc_dma_alloc_chan_resources;
dma               906 drivers/dma/sirf-dma.c 	dma->device_free_chan_resources = sirfsoc_dma_free_chan_resources;
dma               907 drivers/dma/sirf-dma.c 	dma->device_issue_pending = sirfsoc_dma_issue_pending;
dma               908 drivers/dma/sirf-dma.c 	dma->device_config = sirfsoc_dma_slave_config;
dma               909 drivers/dma/sirf-dma.c 	dma->device_pause = sirfsoc_dma_pause_chan;
dma               910 drivers/dma/sirf-dma.c 	dma->device_resume = sirfsoc_dma_resume_chan;
dma               911 drivers/dma/sirf-dma.c 	dma->device_terminate_all = sirfsoc_dma_terminate_all;
dma               912 drivers/dma/sirf-dma.c 	dma->device_tx_status = sirfsoc_dma_tx_status;
dma               913 drivers/dma/sirf-dma.c 	dma->device_prep_interleaved_dma = sirfsoc_dma_prep_interleaved;
dma               914 drivers/dma/sirf-dma.c 	dma->device_prep_dma_cyclic = sirfsoc_dma_prep_cyclic;
dma               915 drivers/dma/sirf-dma.c 	dma->src_addr_widths = SIRFSOC_DMA_BUSWIDTHS;
dma               916 drivers/dma/sirf-dma.c 	dma->dst_addr_widths = SIRFSOC_DMA_BUSWIDTHS;
dma               917 drivers/dma/sirf-dma.c 	dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
dma               919 drivers/dma/sirf-dma.c 	INIT_LIST_HEAD(&dma->channels);
dma               920 drivers/dma/sirf-dma.c 	dma_cap_set(DMA_SLAVE, dma->cap_mask);
dma               921 drivers/dma/sirf-dma.c 	dma_cap_set(DMA_CYCLIC, dma->cap_mask);
dma               922 drivers/dma/sirf-dma.c 	dma_cap_set(DMA_INTERLEAVE, dma->cap_mask);
dma               923 drivers/dma/sirf-dma.c 	dma_cap_set(DMA_PRIVATE, dma->cap_mask);
dma               928 drivers/dma/sirf-dma.c 		schan->chan.device = dma;
dma               938 drivers/dma/sirf-dma.c 		list_add_tail(&schan->chan.device_node, &dma->channels);
dma               946 drivers/dma/sirf-dma.c 	ret = dma_async_device_register(dma);
dma               963 drivers/dma/sirf-dma.c 	dma_async_device_unregister(dma);
dma               977 drivers/dma/sirf-dma.c 	dma_async_device_unregister(&sdma->dma);
dma              2782 drivers/dma/ste_dma40.c static void __init d40_chan_init(struct d40_base *base, struct dma_device *dma,
dma              2789 drivers/dma/ste_dma40.c 	INIT_LIST_HEAD(&dma->channels);
dma              2794 drivers/dma/ste_dma40.c 		d40c->chan.device = dma;
dma              2811 drivers/dma/ste_dma40.c 			      &dma->channels);
dma                88 drivers/dma/timb_dma.c 	struct dma_device	dma;
dma               263 drivers/dma/timb_dma.c 	for (i = 0; i < td->dma.chancnt; i++) {
dma               580 drivers/dma/timb_dma.c 	for (i = 0; i < td->dma.chancnt; i++)
dma               669 drivers/dma/timb_dma.c 	td->dma.device_alloc_chan_resources	= td_alloc_chan_resources;
dma               670 drivers/dma/timb_dma.c 	td->dma.device_free_chan_resources	= td_free_chan_resources;
dma               671 drivers/dma/timb_dma.c 	td->dma.device_tx_status		= td_tx_status;
dma               672 drivers/dma/timb_dma.c 	td->dma.device_issue_pending		= td_issue_pending;
dma               674 drivers/dma/timb_dma.c 	dma_cap_set(DMA_SLAVE, td->dma.cap_mask);
dma               675 drivers/dma/timb_dma.c 	dma_cap_set(DMA_PRIVATE, td->dma.cap_mask);
dma               676 drivers/dma/timb_dma.c 	td->dma.device_prep_slave_sg = td_prep_slave_sg;
dma               677 drivers/dma/timb_dma.c 	td->dma.device_terminate_all = td_terminate_all;
dma               679 drivers/dma/timb_dma.c 	td->dma.dev = &pdev->dev;
dma               681 drivers/dma/timb_dma.c 	INIT_LIST_HEAD(&td->dma.channels);
dma               695 drivers/dma/timb_dma.c 		td_chan->chan.device = &td->dma;
dma               715 drivers/dma/timb_dma.c 		list_add_tail(&td_chan->chan.device_node, &td->dma.channels);
dma               718 drivers/dma/timb_dma.c 	err = dma_async_device_register(&td->dma);
dma               749 drivers/dma/timb_dma.c 	dma_async_device_unregister(&td->dma);
dma               652 drivers/dma/txx9dmac.c 	dev_vdbg(ddev->chan[0]->dma.dev, "tasklet: mcr=%x\n", mcr);
dma               675 drivers/dma/txx9dmac.c 	dev_vdbg(ddev->chan[0]->dma.dev, "interrupt: status=%#x\n",
dma              1095 drivers/dma/txx9dmac.c 	dc->dma.dev = &pdev->dev;
dma              1096 drivers/dma/txx9dmac.c 	dc->dma.device_alloc_chan_resources = txx9dmac_alloc_chan_resources;
dma              1097 drivers/dma/txx9dmac.c 	dc->dma.device_free_chan_resources = txx9dmac_free_chan_resources;
dma              1098 drivers/dma/txx9dmac.c 	dc->dma.device_terminate_all = txx9dmac_terminate_all;
dma              1099 drivers/dma/txx9dmac.c 	dc->dma.device_tx_status = txx9dmac_tx_status;
dma              1100 drivers/dma/txx9dmac.c 	dc->dma.device_issue_pending = txx9dmac_issue_pending;
dma              1102 drivers/dma/txx9dmac.c 		dc->dma.device_prep_dma_memcpy = txx9dmac_prep_dma_memcpy;
dma              1103 drivers/dma/txx9dmac.c 		dma_cap_set(DMA_MEMCPY, dc->dma.cap_mask);
dma              1105 drivers/dma/txx9dmac.c 		dc->dma.device_prep_slave_sg = txx9dmac_prep_slave_sg;
dma              1106 drivers/dma/txx9dmac.c 		dma_cap_set(DMA_SLAVE, dc->dma.cap_mask);
dma              1107 drivers/dma/txx9dmac.c 		dma_cap_set(DMA_PRIVATE, dc->dma.cap_mask);
dma              1110 drivers/dma/txx9dmac.c 	INIT_LIST_HEAD(&dc->dma.channels);
dma              1126 drivers/dma/txx9dmac.c 	dc->chan.device = &dc->dma;
dma              1144 drivers/dma/txx9dmac.c 	err = dma_async_device_register(&dc->dma);
dma              1148 drivers/dma/txx9dmac.c 		dc->dma.dev_id,
dma              1149 drivers/dma/txx9dmac.c 		dma_has_cap(DMA_MEMCPY, dc->dma.cap_mask) ? " memcpy" : "",
dma              1150 drivers/dma/txx9dmac.c 		dma_has_cap(DMA_SLAVE, dc->dma.cap_mask) ? " slave" : "");
dma              1160 drivers/dma/txx9dmac.c 	dma_async_device_unregister(&dc->dma);
dma               162 drivers/dma/txx9dmac.h 	struct dma_device	dma;
dma                93 drivers/firmware/qemu_fw_cfg.c 	phys_addr_t dma;
dma               110 drivers/firmware/qemu_fw_cfg.c 	dma = virt_to_phys(d);
dma               112 drivers/firmware/qemu_fw_cfg.c 	iowrite32be((u64)dma >> 32, fw_cfg_reg_dma);
dma               115 drivers/firmware/qemu_fw_cfg.c 	iowrite32be(dma, fw_cfg_reg_dma + 4);
dma               234 drivers/firmware/qemu_fw_cfg.c 	struct resource *range, *ctrl, *data, *dma;
dma               271 drivers/firmware/qemu_fw_cfg.c 	dma = platform_get_resource_byname(pdev, IORESOURCE_REG, "dma");
dma               281 drivers/firmware/qemu_fw_cfg.c 	if (dma)
dma               282 drivers/firmware/qemu_fw_cfg.c 		fw_cfg_reg_dma = fw_cfg_dev_base + dma->start;
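
The qemu_fw_cfg lines above program a 64-bit physical address into a big-endian register pair: the high 32 bits go to the DMA register, the low 32 bits to the following word. A sketch of that split; the register pointer is an assumed parameter.

#include <linux/io.h>

/* Write a 64-bit DMA address as two big-endian 32-bit halves, high word first. */
static void foo_write_dma_addr(void __iomem *reg, u64 addr)
{
	iowrite32be(addr >> 32, reg);
	iowrite32be((u32)addr, reg + 4);
}
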
dma               713 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma               729 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma               777 drivers/gpu/drm/drm_bufs.c 	entry = &dma->bufs[order];
dma               804 drivers/gpu/drm/drm_bufs.c 		buf->idx = dma->buf_count + entry->buf_count;
dma               809 drivers/gpu/drm/drm_bufs.c 		buf->offset = (dma->byte_count + offset);
dma               837 drivers/gpu/drm/drm_bufs.c 	temp_buflist = krealloc(dma->buflist,
dma               838 drivers/gpu/drm/drm_bufs.c 				(dma->buf_count + entry->buf_count) *
dma               839 drivers/gpu/drm/drm_bufs.c 				sizeof(*dma->buflist), GFP_KERNEL);
dma               847 drivers/gpu/drm/drm_bufs.c 	dma->buflist = temp_buflist;
dma               850 drivers/gpu/drm/drm_bufs.c 		dma->buflist[i + dma->buf_count] = &entry->buflist[i];
dma               853 drivers/gpu/drm/drm_bufs.c 	dma->buf_count += entry->buf_count;
dma               854 drivers/gpu/drm/drm_bufs.c 	dma->seg_count += entry->seg_count;
dma               855 drivers/gpu/drm/drm_bufs.c 	dma->page_count += byte_count >> PAGE_SHIFT;
dma               856 drivers/gpu/drm/drm_bufs.c 	dma->byte_count += byte_count;
dma               858 drivers/gpu/drm/drm_bufs.c 	DRM_DEBUG("dma->buf_count : %d\n", dma->buf_count);
dma               866 drivers/gpu/drm/drm_bufs.c 	dma->flags = _DRM_DMA_USE_AGP;
dma               877 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma               897 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma               927 drivers/gpu/drm/drm_bufs.c 	entry = &dma->bufs[order];
dma               958 drivers/gpu/drm/drm_bufs.c 	temp_pagelist = kmalloc_array(dma->page_count + (count << page_order),
dma               959 drivers/gpu/drm/drm_bufs.c 				      sizeof(*dma->pagelist),
dma               969 drivers/gpu/drm/drm_bufs.c 	       dma->pagelist, dma->page_count * sizeof(*dma->pagelist));
dma               971 drivers/gpu/drm/drm_bufs.c 		  dma->page_count + (count << page_order));
dma               995 drivers/gpu/drm/drm_bufs.c 				  dma->page_count + page_count,
dma               997 drivers/gpu/drm/drm_bufs.c 			temp_pagelist[dma->page_count + page_count++]
dma              1004 drivers/gpu/drm/drm_bufs.c 			buf->idx = dma->buf_count + entry->buf_count;
dma              1008 drivers/gpu/drm/drm_bufs.c 			buf->offset = (dma->byte_count + byte_count + offset);
dma              1036 drivers/gpu/drm/drm_bufs.c 	temp_buflist = krealloc(dma->buflist,
dma              1037 drivers/gpu/drm/drm_bufs.c 				(dma->buf_count + entry->buf_count) *
dma              1038 drivers/gpu/drm/drm_bufs.c 				sizeof(*dma->buflist), GFP_KERNEL);
dma              1047 drivers/gpu/drm/drm_bufs.c 	dma->buflist = temp_buflist;
dma              1050 drivers/gpu/drm/drm_bufs.c 		dma->buflist[i + dma->buf_count] = &entry->buflist[i];
dma              1056 drivers/gpu/drm/drm_bufs.c 	if (dma->page_count) {
dma              1057 drivers/gpu/drm/drm_bufs.c 		kfree(dma->pagelist);
dma              1059 drivers/gpu/drm/drm_bufs.c 	dma->pagelist = temp_pagelist;
dma              1061 drivers/gpu/drm/drm_bufs.c 	dma->buf_count += entry->buf_count;
dma              1062 drivers/gpu/drm/drm_bufs.c 	dma->seg_count += entry->seg_count;
dma              1063 drivers/gpu/drm/drm_bufs.c 	dma->page_count += entry->seg_count << page_order;
dma              1064 drivers/gpu/drm/drm_bufs.c 	dma->byte_count += PAGE_SIZE * (entry->seg_count << page_order);
dma              1072 drivers/gpu/drm/drm_bufs.c 		dma->flags = _DRM_DMA_USE_PCI_RO;
dma              1083 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma              1101 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma              1139 drivers/gpu/drm/drm_bufs.c 	entry = &dma->bufs[order];
dma              1166 drivers/gpu/drm/drm_bufs.c 		buf->idx = dma->buf_count + entry->buf_count;
dma              1171 drivers/gpu/drm/drm_bufs.c 		buf->offset = (dma->byte_count + offset);
dma              1200 drivers/gpu/drm/drm_bufs.c 	temp_buflist = krealloc(dma->buflist,
dma              1201 drivers/gpu/drm/drm_bufs.c 				(dma->buf_count + entry->buf_count) *
dma              1202 drivers/gpu/drm/drm_bufs.c 				sizeof(*dma->buflist), GFP_KERNEL);
dma              1210 drivers/gpu/drm/drm_bufs.c 	dma->buflist = temp_buflist;
dma              1213 drivers/gpu/drm/drm_bufs.c 		dma->buflist[i + dma->buf_count] = &entry->buflist[i];
dma              1216 drivers/gpu/drm/drm_bufs.c 	dma->buf_count += entry->buf_count;
dma              1217 drivers/gpu/drm/drm_bufs.c 	dma->seg_count += entry->seg_count;
dma              1218 drivers/gpu/drm/drm_bufs.c 	dma->page_count += byte_count >> PAGE_SHIFT;
dma              1219 drivers/gpu/drm/drm_bufs.c 	dma->byte_count += byte_count;
dma              1221 drivers/gpu/drm/drm_bufs.c 	DRM_DEBUG("dma->buf_count : %d\n", dma->buf_count);
dma              1229 drivers/gpu/drm/drm_bufs.c 	dma->flags = _DRM_DMA_USE_SG;
dma              1297 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma              1307 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma              1319 drivers/gpu/drm/drm_bufs.c 		if (dma->bufs[i].buf_count)
dma              1327 drivers/gpu/drm/drm_bufs.c 			struct drm_buf_entry *from = &dma->bufs[i];
dma              1333 drivers/gpu/drm/drm_bufs.c 					  dma->bufs[i].buf_count,
dma              1334 drivers/gpu/drm/drm_bufs.c 					  dma->bufs[i].buf_size,
dma              1335 drivers/gpu/drm/drm_bufs.c 					  dma->bufs[i].low_mark,
dma              1336 drivers/gpu/drm/drm_bufs.c 					  dma->bufs[i].high_mark);
dma              1384 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma              1395 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma              1403 drivers/gpu/drm/drm_bufs.c 	entry = &dma->bufs[order];
dma              1431 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma              1443 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma              1450 drivers/gpu/drm/drm_bufs.c 		if (idx < 0 || idx >= dma->buf_count) {
dma              1452 drivers/gpu/drm/drm_bufs.c 				  idx, dma->buf_count - 1);
dma              1455 drivers/gpu/drm/drm_bufs.c 		idx = array_index_nospec(idx, dma->buf_count);
dma              1456 drivers/gpu/drm/drm_bufs.c 		buf = dma->buflist[idx];
dma              1488 drivers/gpu/drm/drm_bufs.c 	struct drm_device_dma *dma = dev->dma;
dma              1499 drivers/gpu/drm/drm_bufs.c 	if (!dma)
dma              1510 drivers/gpu/drm/drm_bufs.c 	if (*p >= dma->buf_count) {
dma              1511 drivers/gpu/drm/drm_bufs.c 		if ((dev->agp && (dma->flags & _DRM_DMA_USE_AGP))
dma              1513 drivers/gpu/drm/drm_bufs.c 			&& (dma->flags & _DRM_DMA_USE_SG))) {
dma              1526 drivers/gpu/drm/drm_bufs.c 			virtual = vm_mmap(file_priv->filp, 0, dma->byte_count,
dma              1537 drivers/gpu/drm/drm_bufs.c 		for (i = 0; i < dma->buf_count; i++) {
dma              1538 drivers/gpu/drm/drm_bufs.c 			if (f(data, i, virtual, dma->buflist[i]) < 0) {
dma              1545 drivers/gpu/drm/drm_bufs.c 	*p = dma->buf_count;
dma                63 drivers/gpu/drm/drm_dma.c 	dev->dma = kzalloc(sizeof(*dev->dma), GFP_KERNEL);
dma                64 drivers/gpu/drm/drm_dma.c 	if (!dev->dma)
dma                68 drivers/gpu/drm/drm_dma.c 		memset(&dev->dma->bufs[i], 0, sizeof(dev->dma->bufs[0]));
dma                83 drivers/gpu/drm/drm_dma.c 	struct drm_device_dma *dma = dev->dma;
dma                90 drivers/gpu/drm/drm_dma.c 	if (!dma)
dma                95 drivers/gpu/drm/drm_dma.c 		if (dma->bufs[i].seg_count) {
dma                99 drivers/gpu/drm/drm_dma.c 				  dma->bufs[i].buf_count,
dma               100 drivers/gpu/drm/drm_dma.c 				  dma->bufs[i].seg_count);
dma               101 drivers/gpu/drm/drm_dma.c 			for (j = 0; j < dma->bufs[i].seg_count; j++) {
dma               102 drivers/gpu/drm/drm_dma.c 				if (dma->bufs[i].seglist[j]) {
dma               103 drivers/gpu/drm/drm_dma.c 					drm_pci_free(dev, dma->bufs[i].seglist[j]);
dma               106 drivers/gpu/drm/drm_dma.c 			kfree(dma->bufs[i].seglist);
dma               108 drivers/gpu/drm/drm_dma.c 		if (dma->bufs[i].buf_count) {
dma               109 drivers/gpu/drm/drm_dma.c 			for (j = 0; j < dma->bufs[i].buf_count; j++) {
dma               110 drivers/gpu/drm/drm_dma.c 				kfree(dma->bufs[i].buflist[j].dev_private);
dma               112 drivers/gpu/drm/drm_dma.c 			kfree(dma->bufs[i].buflist);
dma               116 drivers/gpu/drm/drm_dma.c 	kfree(dma->buflist);
dma               117 drivers/gpu/drm/drm_dma.c 	kfree(dma->pagelist);
dma               118 drivers/gpu/drm/drm_dma.c 	kfree(dev->dma);
dma               119 drivers/gpu/drm/drm_dma.c 	dev->dma = NULL;
dma               151 drivers/gpu/drm/drm_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               154 drivers/gpu/drm/drm_dma.c 	if (!dma)
dma               156 drivers/gpu/drm/drm_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma               157 drivers/gpu/drm/drm_dma.c 		if (dma->buflist[i]->file_priv == file_priv) {
dma               158 drivers/gpu/drm/drm_dma.c 			switch (dma->buflist[i]->list) {
dma               160 drivers/gpu/drm/drm_dma.c 				drm_legacy_free_buffer(dev, dma->buflist[i]);
dma               163 drivers/gpu/drm/drm_dma.c 				dma->buflist[i]->list = DRM_LIST_RECLAIM;
dma               312 drivers/gpu/drm/drm_vm.c 	struct drm_device_dma *dma = dev->dma;
dma               317 drivers/gpu/drm/drm_vm.c 	if (!dma)
dma               319 drivers/gpu/drm/drm_vm.c 	if (!dma->pagelist)
dma               325 drivers/gpu/drm/drm_vm.c 	page = virt_to_page((void *)dma->pagelist[page_nr]);
dma               472 drivers/gpu/drm/drm_vm.c 	struct drm_device_dma *dma;
dma               476 drivers/gpu/drm/drm_vm.c 	dma = dev->dma;
dma               481 drivers/gpu/drm/drm_vm.c 	if (!dma || (length >> PAGE_SHIFT) != dma->page_count) {
dma               486 drivers/gpu/drm/drm_vm.c 	    (dma->flags & _DRM_DMA_USE_PCI_RO)) {
dma                57 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma                63 drivers/gpu/drm/i810/i810_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma                64 drivers/gpu/drm/i810/i810_dma.c 		struct drm_buf *buf = dma->buflist[i];
dma               206 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               230 drivers/gpu/drm/i810/i810_dma.c 		for (i = 0; i < dma->buf_count; i++) {
dma               231 drivers/gpu/drm/i810/i810_dma.c 			struct drm_buf *buf = dma->buflist[i];
dma               288 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               293 drivers/gpu/drm/i810/i810_dma.c 	if (dma->buf_count > 1019) {
dma               298 drivers/gpu/drm/i810/i810_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma               299 drivers/gpu/drm/i810/i810_dma.c 		struct drm_buf *buf = dma->buflist[i];
dma               859 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               872 drivers/gpu/drm/i810/i810_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma               873 drivers/gpu/drm/i810/i810_dma.c 		struct drm_buf *buf = dma->buflist[i];
dma               892 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               895 drivers/gpu/drm/i810/i810_dma.c 	if (!dma)
dma               899 drivers/gpu/drm/i810/i810_dma.c 	if (!dma->buflist)
dma               904 drivers/gpu/drm/i810/i810_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma               905 drivers/gpu/drm/i810/i810_dma.c 		struct drm_buf *buf = dma->buflist[i];
dma               932 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               944 drivers/gpu/drm/i810/i810_dma.c 	if (vertex->idx < 0 || vertex->idx >= dma->buf_count)
dma               948 drivers/gpu/drm/i810/i810_dma.c 				 dma->buflist[vertex->idx],
dma              1095 drivers/gpu/drm/i810/i810_dma.c 	struct drm_device_dma *dma = dev->dma;
dma              1104 drivers/gpu/drm/i810/i810_dma.c 	if (mc->idx >= dma->buf_count || mc->idx < 0)
dma              1107 drivers/gpu/drm/i810/i810_dma.c 	i810_dma_dispatch_mc(dev, dma->buflist[mc->idx], mc->used,
dma               115 drivers/gpu/drm/i915/gem/i915_gem_clflush.c 		dma_resv_add_excl_fence(obj->base.resv, &clflush->base.dma);
dma                94 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	struct dma_fence dma;
dma               116 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	struct clear_pages_work *w = container_of(fence, typeof(*w), dma);
dma               122 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	BUILD_BUG_ON(offsetof(typeof(*w), dma));
dma               123 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	dma_fence_free(&w->dma);
dma               136 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	dma_fence_signal(&w->dma);
dma               137 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	dma_fence_put(&w->dma);
dma               146 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_fence_set_error(&w->dma, fence->error);
dma               163 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	int err = w->dma.error;
dma               236 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_fence_set_error(&w->dma, err);
dma               237 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_fence_signal(&w->dma);
dma               238 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_fence_put(&w->dma);
dma               254 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_fence_put(&w->dma);
dma               292 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	dma_fence_init(&work->dma, &clear_pages_work_ops, &fence_lock, 0, 0);
dma               301 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_fence_set_error(&work->dma, err);
dma               303 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		dma_resv_add_excl_fence(obj->base.resv, &work->dma);
dma               308 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	dma_fence_get(&work->dma);
dma                11 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	struct dma_fence dma;
dma                22 drivers/gpu/drm/i915/gem/i915_gem_fence.c 		dma_fence_signal(&stub->dma);
dma                26 drivers/gpu/drm/i915/gem/i915_gem_fence.c 		dma_fence_put(&stub->dma);
dma                45 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	struct stub_fence *stub = container_of(fence, typeof(*stub), dma);
dma                49 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	BUILD_BUG_ON(offsetof(typeof(*stub), dma));
dma                50 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	dma_fence_free(&stub->dma);
dma                71 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	dma_fence_init(&stub->dma, &stub_fence_ops, &stub->chain.wait.lock,
dma                80 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	dma_resv_add_excl_fence(obj->base.resv, &stub->dma);
dma                82 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	return &stub->dma;
dma                85 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	stub_release(&stub->dma);
dma                92 drivers/gpu/drm/i915/gem/i915_gem_fence.c 	struct stub_fence *stub = container_of(fence, typeof(*stub), dma);
dma                26 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	dma_addr_t dma;
dma                41 drivers/gpu/drm/i915/gem/i915_gem_phys.c 				   &dma, GFP_KERNEL);
dma                57 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg_dma_address(sg) = dma;
dma                89 drivers/gpu/drm/i915/gem/i915_gem_phys.c 			  vaddr, dma);
dma                97 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	dma_addr_t dma = sg_dma_address(pages->sgl);
dma               135 drivers/gpu/drm/i915/gem/i915_gem_phys.c 			  vaddr, dma);
dma               316 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct gvt_dma *dma;
dma               326 drivers/gpu/drm/i915/gvt/kvmgt.c 		dma = rb_entry(node, struct gvt_dma, gfn_node);
dma               327 drivers/gpu/drm/i915/gvt/kvmgt.c 		gvt_dma_unmap_page(vgpu, dma->gfn, dma->dma_addr, dma->size);
dma               328 drivers/gpu/drm/i915/gvt/kvmgt.c 		__gvt_cache_remove_entry(vgpu, dma);
dma               577 drivers/gpu/drm/i915/i915_gem_gtt.c 	p->daddr = dma_map_page_attrs(vm->dma,
dma               582 drivers/gpu/drm/i915/i915_gem_gtt.c 	if (unlikely(dma_mapping_error(vm->dma, p->daddr))) {
dma               599 drivers/gpu/drm/i915/i915_gem_gtt.c 	dma_unmap_page(vm->dma, p->daddr, PAGE_SIZE, PCI_DMA_BIDIRECTIONAL);
dma               650 drivers/gpu/drm/i915/i915_gem_gtt.c 		addr = dma_map_page_attrs(vm->dma,
dma               655 drivers/gpu/drm/i915/i915_gem_gtt.c 		if (unlikely(dma_mapping_error(vm->dma, addr)))
dma               667 drivers/gpu/drm/i915/i915_gem_gtt.c 		dma_unmap_page(vm->dma, addr, size, PCI_DMA_BIDIRECTIONAL);
dma               684 drivers/gpu/drm/i915/i915_gem_gtt.c 	dma_unmap_page(vm->dma, p->daddr, BIT(order) << PAGE_SHIFT,
dma              1159 drivers/gpu/drm/i915/i915_gem_gtt.c 	dma_addr_t dma, max;
dma              1182 drivers/gpu/drm/i915/i915_gem_gtt.c 		vaddr[gen8_pd_index(idx, 0)] = pte_encode | iter->dma;
dma              1184 drivers/gpu/drm/i915/i915_gem_gtt.c 		iter->dma += I915_GTT_PAGE_SIZE;
dma              1185 drivers/gpu/drm/i915/i915_gem_gtt.c 		if (iter->dma >= iter->max) {
dma              1192 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter->dma = sg_dma_address(iter->sg);
dma              1193 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter->max = iter->dma + iter->sg->length;
dma              1237 drivers/gpu/drm/i915/i915_gem_gtt.c 		    IS_ALIGNED(iter->dma, I915_GTT_PAGE_SIZE_2M) &&
dma              1254 drivers/gpu/drm/i915/i915_gem_gtt.c 			    IS_ALIGNED(iter->dma, I915_GTT_PAGE_SIZE_64K) &&
dma              1264 drivers/gpu/drm/i915/i915_gem_gtt.c 			vaddr[index++] = encode | iter->dma;
dma              1267 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter->dma += page_size;
dma              1269 drivers/gpu/drm/i915/i915_gem_gtt.c 			if (iter->dma >= iter->max) {
dma              1275 drivers/gpu/drm/i915/i915_gem_gtt.c 				iter->dma = sg_dma_address(iter->sg);
dma              1276 drivers/gpu/drm/i915/i915_gem_gtt.c 				iter->max = iter->dma + rem;
dma              1279 drivers/gpu/drm/i915/i915_gem_gtt.c 				    !(IS_ALIGNED(iter->dma, I915_GTT_PAGE_SIZE_64K) &&
dma              1284 drivers/gpu/drm/i915/i915_gem_gtt.c 				if (unlikely(!IS_ALIGNED(iter->dma, page_size)))
dma              1436 drivers/gpu/drm/i915/i915_gem_gtt.c 	ppgtt->vm.dma = &i915->drm.pdev->dev;
dma              1662 drivers/gpu/drm/i915/i915_gem_gtt.c 		vaddr[act_pte] = pte_encode | GEN6_PTE_ADDR_ENCODE(iter.dma);
dma              1664 drivers/gpu/drm/i915/i915_gem_gtt.c 		iter.dma += I915_GTT_PAGE_SIZE;
dma              1665 drivers/gpu/drm/i915/i915_gem_gtt.c 		if (iter.dma == iter.max) {
dma              1670 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter.dma = sg_dma_address(iter.sg);
dma              1671 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter.max = iter.dma + iter.sg->length;
dma              3148 drivers/gpu/drm/i915/i915_gem_gtt.c 	ggtt->vm.dma = &i915->drm.pdev->dev;
dma               295 drivers/gpu/drm/i915/i915_gem_gtt.h 	struct device *dma;
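
The i915_gem_gtt lines above use the streaming DMA API against the struct device stored in vm->dma: dma_map_page_attrs() to obtain a bus address, dma_mapping_error() to validate it, and dma_unmap_page() on the release path. A generic sketch of that map/check/unmap pattern (not the i915 code itself).

#include <linux/dma-mapping.h>
#include <linux/mm.h>

static dma_addr_t foo_map_page(struct device *dev, struct page *page)
{
	dma_addr_t addr;

	addr = dma_map_page(dev, page, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, addr))
		return DMA_MAPPING_ERROR;

	return addr;
}

static void foo_unmap_page(struct device *dev, dma_addr_t addr)
{
	dma_unmap_page(dev, addr, PAGE_SIZE, DMA_BIDIRECTIONAL);
}
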
dma               966 drivers/gpu/drm/i915/i915_gpu_error.c 	dma_addr_t dma;
dma               992 drivers/gpu/drm/i915/i915_gpu_error.c 	for_each_sgt_dma(dma, iter, vma->pages) {
dma               995 drivers/gpu/drm/i915/i915_gpu_error.c 		ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);
dma                23 drivers/gpu/drm/i915/i915_scatterlist.h 		dma_addr_t dma;
dma                27 drivers/gpu/drm/i915/i915_scatterlist.h } __sgt_iter(struct scatterlist *sgl, bool dma) {
dma                33 drivers/gpu/drm/i915/i915_scatterlist.h 		if (dma)
dma                34 drivers/gpu/drm/i915/i915_scatterlist.h 			s.dma = sg_dma_address(s.sgp);
dma                78 drivers/gpu/drm/i915/i915_scatterlist.h 	     ((__dmap) = (__iter).dma + (__iter).curr);			\
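
__sgt_iter / for_each_sgt_dma above are i915-local helpers for walking the DMA side of a scatter-gather table. The generic equivalent iterates the mapped entries with for_each_sg() and reads sg_dma_address()/sg_dma_len(); nents here should be the count returned by dma_map_sg(). A brief sketch of that generic walk.

#include <linux/scatterlist.h>

static void foo_walk_dma(struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		dma_addr_t addr = sg_dma_address(sg);
		unsigned int len = sg_dma_len(sg);

		/* a real driver would program addr/len into its hardware here */
		(void)addr;
		(void)len;
	}
}
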
dma               374 drivers/gpu/drm/i915/i915_sw_fence.c 	struct dma_fence *dma;
dma               380 drivers/gpu/drm/i915/i915_sw_fence.c static void dma_i915_sw_fence_wake(struct dma_fence *dma,
dma               385 drivers/gpu/drm/i915/i915_sw_fence.c 	i915_sw_fence_set_error_once(cb->fence, dma->error);
dma               400 drivers/gpu/drm/i915/i915_sw_fence.c 		  cb->dma->ops->get_driver_name(cb->dma),
dma               401 drivers/gpu/drm/i915/i915_sw_fence.c 		  cb->dma->ops->get_timeline_name(cb->dma),
dma               402 drivers/gpu/drm/i915/i915_sw_fence.c 		  cb->dma->seqno,
dma               409 drivers/gpu/drm/i915/i915_sw_fence.c static void dma_i915_sw_fence_wake_timer(struct dma_fence *dma,
dma               429 drivers/gpu/drm/i915/i915_sw_fence.c 	dma_fence_put(cb->dma);
dma               435 drivers/gpu/drm/i915/i915_sw_fence.c 				  struct dma_fence *dma,
dma               446 drivers/gpu/drm/i915/i915_sw_fence.c 	if (dma_fence_is_signaled(dma))
dma               457 drivers/gpu/drm/i915/i915_sw_fence.c 		return dma_fence_wait(dma, false);
dma               468 drivers/gpu/drm/i915/i915_sw_fence.c 		timer->dma = dma_fence_get(dma);
dma               478 drivers/gpu/drm/i915/i915_sw_fence.c 	ret = dma_fence_add_callback(dma, &cb->base, func);
dma               482 drivers/gpu/drm/i915/i915_sw_fence.c 		func(dma, &cb->base);
dma               490 drivers/gpu/drm/i915/i915_sw_fence.c static void __dma_i915_sw_fence_wake(struct dma_fence *dma,
dma               495 drivers/gpu/drm/i915/i915_sw_fence.c 	i915_sw_fence_set_error_once(cb->fence, dma->error);
dma               500 drivers/gpu/drm/i915/i915_sw_fence.c 				    struct dma_fence *dma,
dma               507 drivers/gpu/drm/i915/i915_sw_fence.c 	if (dma_fence_is_signaled(dma))
dma               513 drivers/gpu/drm/i915/i915_sw_fence.c 	ret = dma_fence_add_callback(dma, &cb->base, __dma_i915_sw_fence_wake);
dma               517 drivers/gpu/drm/i915/i915_sw_fence.c 		__dma_i915_sw_fence_wake(dma, &cb->base);
dma                78 drivers/gpu/drm/i915/i915_sw_fence.h 				    struct dma_fence *dma,
dma                81 drivers/gpu/drm/i915/i915_sw_fence.h 				  struct dma_fence *dma,
dma                16 drivers/gpu/drm/i915/i915_sw_fence_work.c 		dma_fence_set_error(&f->dma, err);
dma                17 drivers/gpu/drm/i915/i915_sw_fence_work.c 	dma_fence_signal(&f->dma);
dma                18 drivers/gpu/drm/i915/i915_sw_fence_work.c 	dma_fence_put(&f->dma);
dma                29 drivers/gpu/drm/i915/i915_sw_fence_work.c 			dma_fence_set_error(&f->dma, fence->error);
dma                31 drivers/gpu/drm/i915/i915_sw_fence_work.c 		if (!f->dma.error) {
dma                32 drivers/gpu/drm/i915/i915_sw_fence_work.c 			dma_fence_get(&f->dma);
dma                35 drivers/gpu/drm/i915/i915_sw_fence_work.c 			dma_fence_signal(&f->dma);
dma                40 drivers/gpu/drm/i915/i915_sw_fence_work.c 		dma_fence_put(&f->dma);
dma                54 drivers/gpu/drm/i915/i915_sw_fence_work.c 	struct dma_fence_work *f = container_of(fence, typeof(*f), dma);
dma                61 drivers/gpu/drm/i915/i915_sw_fence_work.c 	struct dma_fence_work *f = container_of(fence, typeof(*f), dma);
dma                68 drivers/gpu/drm/i915/i915_sw_fence_work.c 	BUILD_BUG_ON(offsetof(typeof(*f), dma));
dma                69 drivers/gpu/drm/i915/i915_sw_fence_work.c 	dma_fence_free(&f->dma);
dma                82 drivers/gpu/drm/i915/i915_sw_fence_work.c 	dma_fence_init(&f->dma, &fence_ops, &f->lock, 0, 0);
dma                25 drivers/gpu/drm/i915/i915_sw_fence_work.h 	struct dma_fence dma;
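
The i915 entries above (i915_gem_client_blt.c, i915_gem_fence.c, i915_sw_fence_work.c) all embed a struct dma_fence inside a larger work structure, recover the container with container_of() in the fence ops, and drive the lifecycle with dma_fence_init/signal/put, freeing through dma_fence_free() from .release. A self-contained sketch of that embedding, with hypothetical foo_* names.

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct foo_work {
	/* Keep the fence first so dma_fence_free() releases this allocation. */
	struct dma_fence dma;
	spinlock_t lock;
};

static const char *foo_fence_driver_name(struct dma_fence *fence)
{
	return "foo";
}

static const char *foo_fence_timeline_name(struct dma_fence *fence)
{
	return "foo-work";
}

static void foo_fence_release(struct dma_fence *fence)
{
	struct foo_work *w = container_of(fence, typeof(*w), dma);

	dma_fence_free(&w->dma);
}

static const struct dma_fence_ops foo_fence_ops = {
	.get_driver_name	= foo_fence_driver_name,
	.get_timeline_name	= foo_fence_timeline_name,
	.release		= foo_fence_release,
};

static struct foo_work *foo_work_create(void)
{
	struct foo_work *w = kzalloc(sizeof(*w), GFP_KERNEL);

	if (!w)
		return NULL;

	spin_lock_init(&w->lock);
	dma_fence_init(&w->dma, &foo_fence_ops, &w->lock, 0, 0);
	return w;
}

/* Completion path: record any error, signal waiters, drop the creator's ref. */
static void foo_work_complete(struct foo_work *w, int err)
{
	if (err)
		dma_fence_set_error(&w->dma, err);
	dma_fence_signal(&w->dma);
	dma_fence_put(&w->dma);
}
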
dma                46 drivers/gpu/drm/i915/selftests/i915_gem.c 		const dma_addr_t dma = i915->dsm.start + page;
dma                50 drivers/gpu/drm/i915/selftests/i915_gem.c 		ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);
dma               630 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	struct dma_fence *dma;
dma               632 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	dma = kmalloc(sizeof(*dma), GFP_KERNEL);
dma               633 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	if (dma)
dma               634 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 		dma_fence_init(dma, &mock_fence_ops, &mock_fence_lock, 0, 0);
dma               636 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	return dma;
dma               640 drivers/gpu/drm/i915/selftests/i915_sw_fence.c wrap_dma_fence(struct dma_fence *dma, unsigned long delay)
dma               649 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	err = i915_sw_fence_await_dma_fence(fence, dma, delay, GFP_NOWAIT);
dma               664 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	struct dma_fence *dma;
dma               667 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	dma = alloc_dma_fence();
dma               668 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	if (!dma)
dma               671 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	timeout = wrap_dma_fence(dma, delay);
dma               677 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	not = wrap_dma_fence(dma, 0);
dma               718 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	dma_fence_signal(dma);
dma               727 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	dma_fence_put(dma);
dma               732 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	dma_fence_signal(dma);
dma               737 drivers/gpu/drm/i915/selftests/i915_sw_fence.c 	dma_fence_put(dma);
dma               624 drivers/gpu/drm/i915/selftests/i915_vma.c 	dma_addr_t dma;
dma               626 drivers/gpu/drm/i915/selftests/i915_vma.c 	for_each_sgt_dma(dma, sgt, vma->pages) {
dma               635 drivers/gpu/drm/i915/selftests/i915_vma.c 		if (src != dma) {
dma               376 drivers/gpu/drm/imx/ipuv3-crtc.c 	ipu_crtc->plane[0] = ipu_plane_init(drm, ipu, pdata->dma[0], dp, 0,
dma               396 drivers/gpu/drm/imx/ipuv3-crtc.c 	if (pdata->dp >= 0 && pdata->dma[1] > 0) {
dma               397 drivers/gpu/drm/imx/ipuv3-crtc.c 		ipu_crtc->plane[1] = ipu_plane_init(drm, ipu, pdata->dma[1],
dma               162 drivers/gpu/drm/imx/ipuv3-plane.c 	ipu_plane->ipu_ch = ipu_idmac_get(ipu_plane->ipu, ipu_plane->dma);
dma               169 drivers/gpu/drm/imx/ipuv3-plane.c 	alpha_ch = ipu_channel_alpha_channel(ipu_plane->dma);
dma               180 drivers/gpu/drm/imx/ipuv3-plane.c 	ipu_plane->dmfc = ipu_dmfc_get(ipu_plane->ipu, ipu_plane->dma);
dma               591 drivers/gpu/drm/imx/ipuv3-plane.c 		axi_id = ipu_chan_assign_axi_id(ipu_plane->dma);
dma               826 drivers/gpu/drm/imx/ipuv3-plane.c 				 int dma, int dp, unsigned int possible_crtcs,
dma               835 drivers/gpu/drm/imx/ipuv3-plane.c 		      dma, dp, possible_crtcs);
dma               844 drivers/gpu/drm/imx/ipuv3-plane.c 	ipu_plane->dma = dma;
dma                26 drivers/gpu/drm/imx/ipuv3-plane.h 	int			dma;
dma                33 drivers/gpu/drm/imx/ipuv3-plane.h 				 int dma, int dp, unsigned int possible_crtcs,
dma                92 drivers/gpu/drm/lima/lima_mmu.c 	mmu_write(LIMA_MMU_DTE_ADDR, dev->empty_vm->pd.dma);
dma               113 drivers/gpu/drm/lima/lima_mmu.c 		mmu_write(LIMA_MMU_DTE_ADDR, vm->pd.dma);
dma               137 drivers/gpu/drm/lima/lima_mmu.c 		mmu_write(LIMA_MMU_DTE_ADDR, dev->empty_vm->pd.dma);
dma                47 drivers/gpu/drm/lima/lima_vm.c static int lima_vm_map_page_table(struct lima_vm *vm, dma_addr_t *dma,
dma                64 drivers/gpu/drm/lima/lima_vm.c 				&vm->bts[pbe].dma, GFP_KERNEL | __GFP_ZERO);
dma                71 drivers/gpu/drm/lima/lima_vm.c 			pts = vm->bts[pbe].dma;
dma                79 drivers/gpu/drm/lima/lima_vm.c 		vm->bts[pbe].cpu[bte] = dma[i++] | LIMA_VM_FLAGS_CACHE;
dma               212 drivers/gpu/drm/lima/lima_vm.c 	vm->pd.cpu = dma_alloc_wc(dev->dev, LIMA_PAGE_SIZE, &vm->pd.dma,
dma               230 drivers/gpu/drm/lima/lima_vm.c 	dma_free_wc(dev->dev, LIMA_PAGE_SIZE, vm->pd.cpu, vm->pd.dma);
dma               246 drivers/gpu/drm/lima/lima_vm.c 				    vm->bts[i].cpu, vm->bts[i].dma);
dma               250 drivers/gpu/drm/lima/lima_vm.c 		dma_free_wc(vm->dev->dev, LIMA_PAGE_SIZE, vm->pd.cpu, vm->pd.dma);
dma                26 drivers/gpu/drm/lima/lima_vm.h 	dma_addr_t dma;
dma               246 drivers/gpu/drm/mga/mga_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               251 drivers/gpu/drm/mga/mga_dma.c 	DRM_DEBUG("count=%d\n", dma->buf_count);
dma               259 drivers/gpu/drm/mga/mga_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma               260 drivers/gpu/drm/mga/mga_dma.c 		buf = dma->buflist[i];
dma               309 drivers/gpu/drm/mga/mga_dma.c 	struct drm_device_dma *dma = dev->dma;
dma               314 drivers/gpu/drm/mga/mga_dma.c 	for (i = 0; i < dma->buf_count; i++) {
dma               315 drivers/gpu/drm/mga/mga_dma.c 		buf = dma->buflist[i];
dma               638 drivers/gpu/drm/mga/mga_dma.c 	if (dev->dma == NULL) {
dma              1112 drivers/gpu/drm/mga/mga_dma.c 	struct drm_device_dma *dma = dev->dma;
dma              1129 drivers/gpu/drm/mga/mga_dma.c 	if (d->request_count < 0 || d->request_count > dma->buf_count) {
dma              1132 drivers/gpu/drm/mga/mga_dma.c 			  dma->buf_count);
dma               873 drivers/gpu/drm/mga/mga_state.c 	struct drm_device_dma *dma = dev->dma;
dma               880 drivers/gpu/drm/mga/mga_state.c 	if (vertex->idx < 0 || vertex->idx > dma->buf_count)
dma               882 drivers/gpu/drm/mga/mga_state.c 	buf = dma->buflist[vertex->idx];
dma               908 drivers/gpu/drm/mga/mga_state.c 	struct drm_device_dma *dma = dev->dma;
dma               915 drivers/gpu/drm/mga/mga_state.c 	if (indices->idx < 0 || indices->idx > dma->buf_count)
dma               918 drivers/gpu/drm/mga/mga_state.c 	buf = dma->buflist[indices->idx];
dma               942 drivers/gpu/drm/mga/mga_state.c 	struct drm_device_dma *dma = dev->dma;
dma               958 drivers/gpu/drm/mga/mga_state.c 	if (iload->idx < 0 || iload->idx > dma->buf_count)
dma               961 drivers/gpu/drm/mga/mga_state.c 	buf = dma->buflist[iload->idx];
dma               701 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_catalog.h #define BLK_DMA(s) ((s)->dma)
dma                20 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	enum mdp4_dma dma;
dma               228 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	enum mdp4_dma dma = mdp4_crtc->dma;
dma               240 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_write(mdp4_kms, REG_MDP4_DMA_SRC_SIZE(dma),
dma               245 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_write(mdp4_kms, REG_MDP4_DMA_SRC_BASE(dma), 0);
dma               246 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_write(mdp4_kms, REG_MDP4_DMA_SRC_STRIDE(dma), 0);
dma               247 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_write(mdp4_kms, REG_MDP4_DMA_DST_SIZE(dma),
dma               259 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	if (dma == DMA_E) {
dma               359 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	enum mdp4_dma dma = mdp4_crtc->dma;
dma               374 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 			mdp4_write(mdp4_kms, REG_MDP4_DMA_CURSOR_SIZE(dma),
dma               377 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 			mdp4_write(mdp4_kms, REG_MDP4_DMA_CURSOR_BASE(dma), iova);
dma               378 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 			mdp4_write(mdp4_kms, REG_MDP4_DMA_CURSOR_BLEND_CONFIG(dma),
dma               383 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 			mdp4_write(mdp4_kms, REG_MDP4_DMA_CURSOR_BASE(dma),
dma               395 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_write(mdp4_kms, REG_MDP4_DMA_CURSOR_POS(dma),
dma               559 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_write(mdp4_kms, REG_MDP4_DMA_CONFIG(mdp4_crtc->dma), config);
dma               571 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	switch (mdp4_crtc->dma) {
dma               633 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_crtc->dma = dma_id;
dma               635 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_crtc->vblank.irqmask = dma2irq(mdp4_crtc->dma);
dma               638 drivers/gpu/drm/msm/disp/mdp4/mdp4_crtc.c 	mdp4_crtc->err.irqmask = dma2err(mdp4_crtc->dma);
dma                81 drivers/gpu/drm/msm/disp/mdp4/mdp4_kms.h static inline uint32_t dma2irq(enum mdp4_dma dma)
dma                83 drivers/gpu/drm/msm/disp/mdp4/mdp4_kms.h 	switch (dma) {
dma                91 drivers/gpu/drm/msm/disp/mdp4/mdp4_kms.h static inline uint32_t dma2err(enum mdp4_dma dma)
dma                93 drivers/gpu/drm/msm/disp/mdp4/mdp4_kms.h 	switch (dma) {
dma                19 drivers/gpu/drm/nouveau/include/nvif/if000a.h 	dma_addr_t *dma;
dma               162 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	struct nvkm_dma *dma;
dma               235 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	int (*dma     )(struct nvkm_device *, int idx, struct nvkm_dma **);
dma                10 drivers/gpu/drm/nouveau/include/nvkm/engine/dma.h 	struct nvkm_dma *dma;
dma                68 drivers/gpu/drm/nouveau/include/nvkm/subdev/mmu.h 	dma_addr_t *dma;
dma               427 drivers/gpu/drm/nouveau/nouveau_chan.c 		chan->dma.max = (0x10000 / 4) - 2;
dma               433 drivers/gpu/drm/nouveau/nouveau_chan.c 		chan->dma.ib_base =  0x10000 / 4;
dma               434 drivers/gpu/drm/nouveau/nouveau_chan.c 		chan->dma.ib_max  = (0x02000 / 8) - 1;
dma               435 drivers/gpu/drm/nouveau/nouveau_chan.c 		chan->dma.ib_put  = 0;
dma               436 drivers/gpu/drm/nouveau/nouveau_chan.c 		chan->dma.ib_free = chan->dma.ib_max - chan->dma.ib_put;
dma               437 drivers/gpu/drm/nouveau/nouveau_chan.c 		chan->dma.max = chan->dma.ib_base;
dma               441 drivers/gpu/drm/nouveau/nouveau_chan.c 	chan->dma.put = 0;
dma               442 drivers/gpu/drm/nouveau/nouveau_chan.c 	chan->dma.cur = chan->dma.put;
dma               443 drivers/gpu/drm/nouveau/nouveau_chan.c 	chan->dma.free = chan->dma.max - chan->dma.cur;
dma                40 drivers/gpu/drm/nouveau/nouveau_chan.h 	} dma;
dma                38 drivers/gpu/drm/nouveau/nouveau_dma.c 	mem = &mem[chan->dma.cur];
dma                43 drivers/gpu/drm/nouveau/nouveau_dma.c 	chan->dma.cur += nr_dwords;
dma                78 drivers/gpu/drm/nouveau/nouveau_dma.c 	    val > chan->push.addr + (chan->dma.max << 2))
dma                89 drivers/gpu/drm/nouveau/nouveau_dma.c 	int ip = (chan->dma.ib_put * 2) + chan->dma.ib_base;
dma                91 drivers/gpu/drm/nouveau/nouveau_dma.c 	BUG_ON(chan->dma.ib_free < 1);
dma                96 drivers/gpu/drm/nouveau/nouveau_dma.c 	chan->dma.ib_put = (chan->dma.ib_put + 1) & chan->dma.ib_max;
dma               102 drivers/gpu/drm/nouveau/nouveau_dma.c 	nvif_wr32(&chan->user, 0x8c, chan->dma.ib_put);
dma               105 drivers/gpu/drm/nouveau/nouveau_dma.c 	chan->dma.ib_free--;
dma               113 drivers/gpu/drm/nouveau/nouveau_dma.c 	while (chan->dma.ib_free < count) {
dma               126 drivers/gpu/drm/nouveau/nouveau_dma.c 		chan->dma.ib_free = get - chan->dma.ib_put;
dma               127 drivers/gpu/drm/nouveau/nouveau_dma.c 		if (chan->dma.ib_free <= 0)
dma               128 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.ib_free += chan->dma.ib_max;
dma               144 drivers/gpu/drm/nouveau/nouveau_dma.c 	while (chan->dma.free < count) {
dma               153 drivers/gpu/drm/nouveau/nouveau_dma.c 		if (get <= chan->dma.cur) {
dma               154 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.free = chan->dma.max - chan->dma.cur;
dma               155 drivers/gpu/drm/nouveau/nouveau_dma.c 			if (chan->dma.free >= count)
dma               167 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.cur = 0;
dma               168 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.put = 0;
dma               171 drivers/gpu/drm/nouveau/nouveau_dma.c 		chan->dma.free = get - chan->dma.cur - 1;
dma               183 drivers/gpu/drm/nouveau/nouveau_dma.c 	if (chan->dma.ib_max)
dma               186 drivers/gpu/drm/nouveau/nouveau_dma.c 	while (chan->dma.free < size) {
dma               203 drivers/gpu/drm/nouveau/nouveau_dma.c 		if (get <= chan->dma.cur) {
dma               217 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.free = chan->dma.max - chan->dma.cur;
dma               218 drivers/gpu/drm/nouveau/nouveau_dma.c 			if (chan->dma.free >= size)
dma               244 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.cur  =
dma               245 drivers/gpu/drm/nouveau/nouveau_dma.c 			chan->dma.put  = NOUVEAU_DMA_SKIPS;
dma               254 drivers/gpu/drm/nouveau/nouveau_dma.c 		chan->dma.free = get - chan->dma.cur - 1;
dma                95 drivers/gpu/drm/nouveau/nouveau_dma.h 	chan->dma.free -= size;
dma               102 drivers/gpu/drm/nouveau/nouveau_dma.h 	nouveau_bo_wr32(chan->push.buffer, chan->dma.cur++, data);
dma               147 drivers/gpu/drm/nouveau/nouveau_dma.h 	if (chan->dma.cur == chan->dma.put)
dma               151 drivers/gpu/drm/nouveau/nouveau_dma.h 	if (chan->dma.ib_max) {
dma               152 drivers/gpu/drm/nouveau/nouveau_dma.h 		nv50_dma_push(chan, chan->push.addr + (chan->dma.put << 2),
dma               153 drivers/gpu/drm/nouveau/nouveau_dma.h 			      (chan->dma.cur - chan->dma.put) << 2);
dma               155 drivers/gpu/drm/nouveau/nouveau_dma.h 		WRITE_PUT(chan->dma.cur);
dma               158 drivers/gpu/drm/nouveau/nouveau_dma.h 	chan->dma.put = chan->dma.cur;
dma               164 drivers/gpu/drm/nouveau/nouveau_dma.h 	chan->dma.cur = chan->dma.put;
dma               775 drivers/gpu/drm/nouveau/nouveau_gem.c 	if (chan->dma.ib_max) {
dma               816 drivers/gpu/drm/nouveau/nouveau_gem.c 			cmd = chan->push.addr + ((chan->dma.cur + 2) << 2);
dma               859 drivers/gpu/drm/nouveau/nouveau_gem.c 	if (chan->dma.ib_max) {
dma               868 drivers/gpu/drm/nouveau/nouveau_gem.c 			      (chan->push.addr + ((chan->dma.cur + 2) << 2));
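The nouveau_chan/nouveau_dma entries above keep a software mirror of the pushbuffer ring (put, cur, max, free) and refresh the free-space count from the hardware GET pointer, wrapping the write position back to the start of the buffer once GET has moved past it. A deliberately simplified illustration of that wrap-around accounting; struct ring and read_get() are stand-ins rather than Nouveau's types, and the real code also emits a jump command when it wraps:

	/* Stand-in ring state; Nouveau keeps the same fields in chan->dma. */
	struct ring {
		int cur;	/* next slot the CPU will write */
		int put;	/* last slot handed to the GPU */
		int max;	/* last usable slot before wrapping */
		int free;	/* cached free-slot count */
	};

	static int read_get(struct ring *ring);	/* hypothetical: read hardware GET */

	static void ring_wait_free(struct ring *ring, int count)
	{
		while (ring->free < count) {
			int get = read_get(ring);

			if (get <= ring->cur) {
				/* GPU is behind us: space runs to the end of the buffer... */
				ring->free = ring->max - ring->cur;
				if (ring->free >= count)
					break;
				/* ...not enough, so wrap the write position to the start. */
				ring->cur = 0;
				ring->put = 0;
			} else {
				/* GPU is ahead of us: keep one slot of separation. */
				ring->free = get - ring->cur - 1;
			}
		}
	}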
dma               120 drivers/gpu/drm/nouveau/nouveau_mem.c 	else            args.dma = tt->dma_address;
dma                92 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               113 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               135 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               155 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               177 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               199 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               221 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               243 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               265 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               287 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               309 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               331 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               353 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               375 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               397 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               420 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               443 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               465 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               490 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               516 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               542 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               568 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               594 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               620 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               646 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               672 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               698 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               724 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               750 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               776 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               802 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               831 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma               857 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               883 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               909 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv04_dma_new,
dma               940 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma               972 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1004 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1036 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1068 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1098 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1132 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1164 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1198 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1231 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1262 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1294 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1328 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = nv50_dma_new,
dma              1365 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1401 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1437 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1474 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1511 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1548 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1584 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf100_dma_new,
dma              1619 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1655 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1694 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1733 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1772 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1800 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1836 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1874 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1912 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1950 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              1987 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2021 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2056 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2091 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2126 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2151 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2186 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2221 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2257 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2293 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2329 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2365 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2391 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gf119_dma_new,
dma              2431 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gv100_dma_new,
dma              2467 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gv100_dma_new,
dma              2502 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gv100_dma_new,
dma              2537 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gv100_dma_new,
dma              2572 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gv100_dma_new,
dma              2607 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.dma = gv100_dma_new,
dma              2696 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(DMAOBJ , device->dma     , &device->dma->engine);
dma              3184 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_DMAOBJ  ,      dma);
dma                36 drivers/gpu/drm/nouveau/nvkm/engine/dma/base.c 	struct nvkm_dma *dma = nvkm_dma(oclass->engine);
dma                40 drivers/gpu/drm/nouveau/nvkm/engine/dma/base.c 	ret = dma->func->class_new(dma, oclass, data, size, &dmaobj);
dma               109 drivers/gpu/drm/nouveau/nvkm/engine/dma/base.c 	struct nvkm_dma *dma;
dma               111 drivers/gpu/drm/nouveau/nvkm/engine/dma/base.c 	if (!(dma = *pdma = kzalloc(sizeof(*dma), GFP_KERNEL)))
dma               113 drivers/gpu/drm/nouveau/nvkm/engine/dma/base.c 	dma->func = func;
dma               115 drivers/gpu/drm/nouveau/nvkm/engine/dma/base.c 	return nvkm_engine_ctor(&nvkm_dma, device, index, true, &dma->engine);
dma                68 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c nvkm_dmaobj_ctor(const struct nvkm_dmaobj_func *func, struct nvkm_dma *dma,
dma                75 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c 	struct nvkm_device *device = dma->engine.subdev.device;
dma                86 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c 	dmaobj->dma = dma;
dma                45 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf100.c 	struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
dma                70 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf100.c gf100_dmaobj_new(struct nvkm_dma *dma, const struct nvkm_oclass *oclass,
dma                85 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf100.c 	ret = nvkm_dmaobj_ctor(&gf100_dmaobj_func, dma, oclass,
dma                44 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf119.c 	struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
dma                68 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf119.c gf119_dmaobj_new(struct nvkm_dma *dma, const struct nvkm_oclass *oclass,
dma                83 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf119.c 	ret = nvkm_dmaobj_ctor(&gf119_dmaobj_func, dma, oclass,
dma                42 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergv100.c 	struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
dma                67 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergv100.c gv100_dmaobj_new(struct nvkm_dma *dma, const struct nvkm_oclass *oclass,
dma                82 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergv100.c 	ret = nvkm_dmaobj_ctor(&gv100_dmaobj_func, dma, oclass,
dma                45 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 	struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
dma                81 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c nv04_dmaobj_new(struct nvkm_dma *dma, const struct nvkm_oclass *oclass,
dma                84 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 	struct nvkm_device *device = dma->engine.subdev.device;
dma                92 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c 	ret = nvkm_dmaobj_ctor(&nv04_dmaobj_func, dma, oclass,
dma                45 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv50.c 	struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device;
dma                70 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv50.c nv50_dmaobj_new(struct nvkm_dma *dma, const struct nvkm_oclass *oclass,
dma                85 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv50.c 	ret = nvkm_dmaobj_ctor(&nv50_dmaobj_func, dma, oclass,
dma                38 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		dma_addr_t *dma;
dma                59 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		return mem->dma[0];
dma                77 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		.dma = mem->dma,
dma                89 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 				       mem->dma[mem->pages], PAGE_SIZE,
dma                93 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		kvfree(mem->dma);
dma               173 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		if (args->v0.dma) {
dma               175 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			mem->dma = args->v0.dma;
dma               196 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (!(mem->dma = kvmalloc_array(size, sizeof(*mem->dma), GFP_KERNEL)))
dma               209 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		mem->dma[mem->pages] = dma_map_page(mmu->subdev.device->dev,
dma               212 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		if (dma_mapping_error(dev, mem->dma[mem->pages])) {
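The nvkm mem entries above keep a kvmalloc'd array of dma_addr_t and populate it page by page with dma_map_page(), checking each mapping with dma_mapping_error(). A condensed sketch of that loop with the corresponding unwind path (the helper name and error code are my choice):

	#include <linux/dma-mapping.h>
	#include <linux/mm.h>

	/* Map 'npages' already-allocated pages and record their bus addresses. */
	static int map_page_array(struct device *dev, struct page **pages,
				  dma_addr_t *dma, int npages)
	{
		int i;

		for (i = 0; i < npages; i++) {
			dma[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
					      DMA_BIDIRECTIONAL);
			if (dma_mapping_error(dev, dma[i]))
				goto unwind;
		}
		return 0;

	unwind:
		while (i--)
			dma_unmap_page(dev, dma[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
		return -EFAULT;
	}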
dma               263 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	bool dma;
dma               267 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		dma = desc->func->pfn_clear(it->vmm, pgt->pt[type], ptei, ptes);
dma               268 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (dma) {
dma              1485 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		map->dma += map->offset >> PAGE_SHIFT;
dma              1487 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		func = map->page->desc->func->dma;
dma                68 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 	nvkm_vmm_pte_func dma;
dma               318 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 		     *MAP->dma, PAGE_SIZE, MAP->dma++)
dma                72 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 			const u64 data = (*map->dma++ >> 8) | map->type;
dma               101 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	.dma = gf100_vmm_pgt_dma,
dma                40 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c 	.dma = gf100_vmm_pgt_dma,
dma                58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	bool dma = false;
dma                66 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 			dma = true;
dma                71 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	return dma;
dma               137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 			const u64 data = (*map->dma++ >> 4) | map->type;
dma               168 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	.dma = gp100_vmm_pgt_dma,
dma                52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 		VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);
dma                69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	.dma = nv04_vmm_pgt_dma,
dma                51 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 		const u32 data = (*map->dma++ >> 7) | 0x00000001;
dma                70 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 	.dma = nv41_vmm_pgt_dma,
dma               119 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, pten);
dma               122 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		map->dma += pten;
dma               128 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 			tmp[i] = *map->dma++ >> 12;
dma               137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, ptes);
dma               138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		map->dma += ptes;
dma               174 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	.dma = nv44_vmm_pgt_dma,
dma                72 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 			const u64 data = *map->dma++ + map->type;
dma               101 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	.dma = nv50_vmm_pgt_dma,
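The per-generation vmm entries above all reduce to the same operation: take the next address from map->dma, shift it into the hardware's PTE layout, OR in the mapping type, and store it into the page table. A purely illustrative version with made-up shift and flag values (no real GPU uses exactly these):

	#include <linux/types.h>

	/* Fictional PTE layout, for illustration only. */
	#define FAKE_PTE_VALID	0x1ULL
	#define FAKE_PTE_SHIFT	8	/* address stored as (addr >> 8) */

	static void fill_ptes(u64 *pte, dma_addr_t *dma, u32 ptes, u64 type)
	{
		while (ptes--)
			*pte++ = (*dma++ >> FAKE_PTE_SHIFT) | type | FAKE_PTE_VALID;
	}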
dma               775 drivers/gpu/drm/r128/r128_cce.c 	struct drm_device_dma *dma = dev->dma;
dma               788 drivers/gpu/drm/r128/r128_cce.c 	for (i = 0; i < dma->buf_count; i++) {
dma               789 drivers/gpu/drm/r128/r128_cce.c 		buf = dma->buflist[i];
dma               820 drivers/gpu/drm/r128/r128_cce.c 	struct drm_device_dma *dma = dev->dma;
dma               828 drivers/gpu/drm/r128/r128_cce.c 	for (i = 0; i < dma->buf_count; i++) {
dma               829 drivers/gpu/drm/r128/r128_cce.c 		buf = dma->buflist[i];
dma               838 drivers/gpu/drm/r128/r128_cce.c 		for (i = 0; i < dma->buf_count; i++) {
dma               839 drivers/gpu/drm/r128/r128_cce.c 			buf = dma->buflist[i];
dma               858 drivers/gpu/drm/r128/r128_cce.c 	struct drm_device_dma *dma = dev->dma;
dma               861 drivers/gpu/drm/r128/r128_cce.c 	for (i = 0; i < dma->buf_count; i++) {
dma               862 drivers/gpu/drm/r128/r128_cce.c 		struct drm_buf *buf = dma->buflist[i];
dma               917 drivers/gpu/drm/r128/r128_cce.c 	struct drm_device_dma *dma = dev->dma;
dma               933 drivers/gpu/drm/r128/r128_cce.c 	if (d->request_count < 0 || d->request_count > dma->buf_count) {
dma               935 drivers/gpu/drm/r128/r128_cce.c 			  task_pid_nr(current), d->request_count, dma->buf_count);
dma               783 drivers/gpu/drm/r128/r128_state.c 	struct drm_device_dma *dma = dev->dma;
dma               829 drivers/gpu/drm/r128/r128_state.c 	buf = dma->buflist[blit->idx];
dma              1317 drivers/gpu/drm/r128/r128_state.c 	struct drm_device_dma *dma = dev->dma;
dma              1329 drivers/gpu/drm/r128/r128_state.c 	if (vertex->idx < 0 || vertex->idx >= dma->buf_count) {
dma              1331 drivers/gpu/drm/r128/r128_state.c 			  vertex->idx, dma->buf_count - 1);
dma              1343 drivers/gpu/drm/r128/r128_state.c 	buf = dma->buflist[vertex->idx];
dma              1369 drivers/gpu/drm/r128/r128_state.c 	struct drm_device_dma *dma = dev->dma;
dma              1382 drivers/gpu/drm/r128/r128_state.c 	if (elts->idx < 0 || elts->idx >= dma->buf_count) {
dma              1384 drivers/gpu/drm/r128/r128_state.c 			  elts->idx, dma->buf_count - 1);
dma              1396 drivers/gpu/drm/r128/r128_state.c 	buf = dma->buflist[elts->idx];
dma              1433 drivers/gpu/drm/r128/r128_state.c 	struct drm_device_dma *dma = dev->dma;
dma              1444 drivers/gpu/drm/r128/r128_state.c 	if (blit->idx < 0 || blit->idx >= dma->buf_count) {
dma              1446 drivers/gpu/drm/r128/r128_state.c 			  blit->idx, dma->buf_count - 1);
dma              1515 drivers/gpu/drm/r128/r128_state.c 	struct drm_device_dma *dma = dev->dma;
dma              1531 drivers/gpu/drm/r128/r128_state.c 	if (indirect->idx < 0 || indirect->idx >= dma->buf_count) {
dma              1533 drivers/gpu/drm/r128/r128_state.c 			  indirect->idx, dma->buf_count - 1);
dma              1537 drivers/gpu/drm/r128/r128_state.c 	buf = dma->buflist[indirect->idx];
dma              1917 drivers/gpu/drm/radeon/radeon.h 		struct radeon_fence *(*dma)(struct radeon_device *rdev,
dma              2735 drivers/gpu/drm/radeon/radeon.h #define radeon_copy_dma(rdev, s, d, np, resv) (rdev)->asic->copy.dma((rdev), (s), (d), (np), (resv))
dma               231 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = NULL,
dma               299 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               395 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               463 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               531 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               599 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               667 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               735 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               803 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               871 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r200_copy_dma,
dma               968 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r600_copy_dma,
dma              1054 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r600_copy_dma,
dma              1147 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &r600_copy_dma,
dma              1253 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &rv770_copy_dma,
dma              1373 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &evergreen_copy_dma,
dma              1467 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &evergreen_copy_dma,
dma              1560 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &evergreen_copy_dma,
dma              1708 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &evergreen_copy_dma,
dma              1828 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &evergreen_copy_dma,
dma              1966 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &si_copy_dma,
dma              2136 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &cik_copy_dma,
dma              2249 drivers/gpu/drm/radeon/radeon_asic.c 		.dma = &cik_copy_dma,
dma               122 drivers/gpu/drm/radeon/radeon_benchmark.c 	if (rdev->asic->copy.dma) {
dma               256 drivers/gpu/drm/radeon/radeon_test.c 	if (rdev->asic->copy.dma)
dma               336 drivers/gpu/drm/rcar-du/rcar_du_plane.c 	u32 dma[2];
dma               353 drivers/gpu/drm/rcar-du/rcar_du_plane.c 			dma[i] = gem->paddr + fb->offsets[i];
dma               357 drivers/gpu/drm/rcar-du/rcar_du_plane.c 		dma[0] = 0;
dma               358 drivers/gpu/drm/rcar-du/rcar_du_plane.c 		dma[1] = 0;
dma               386 drivers/gpu/drm/rcar-du/rcar_du_plane.c 	rcar_du_plane_write(rgrp, index, PnDSA0R, dma[0]);
dma               397 drivers/gpu/drm/rcar-du/rcar_du_plane.c 		rcar_du_plane_write(rgrp, index, PnDSA0R, dma[1]);
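The rcar-du entries above compute the scan-out address of each colour plane as the CMA buffer's physical base plus the framebuffer's per-plane offset. A small sketch of that calculation for up to two planes; the drm_gem_cma_object and drm_framebuffer fields are standard, but the helper itself is invented:

	#include <drm/drm_framebuffer.h>
	#include <drm/drm_gem_cma_helper.h>

	/*
	 * Per-plane scan-out addresses: physical base of the CMA buffer plus the
	 * framebuffer's per-plane byte offset.  (A real driver would also add a
	 * pitch-scaled offset for the visible x/y origin, with chroma subsampling
	 * taken into account.)
	 */
	static void fb_plane_addrs(struct drm_framebuffer *fb,
				   struct drm_gem_cma_object *gem,
				   dma_addr_t dma[2])
	{
		unsigned int i;

		for (i = 0; i < fb->format->num_planes && i < 2; i++)
			dma[i] = gem->paddr + fb->offsets[i];
	}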
dma               218 drivers/gpu/drm/savage/savage_bci.c 	struct drm_device_dma *dma = dev->dma;
dma               222 drivers/gpu/drm/savage/savage_bci.c 	DRM_DEBUG("count=%d\n", dma->buf_count);
dma               232 drivers/gpu/drm/savage/savage_bci.c 	for (i = 0; i < dma->buf_count; i++) {
dma               233 drivers/gpu/drm/savage/savage_bci.c 		buf = dma->buflist[i];
dma               761 drivers/gpu/drm/savage/savage_bci.c 		if (dev->dma && dev->dma->buflist) {
dma              1016 drivers/gpu/drm/savage/savage_bci.c 	struct drm_device_dma *dma = dev->dma;
dma              1032 drivers/gpu/drm/savage/savage_bci.c 	if (d->request_count < 0 || d->request_count > dma->buf_count) {
dma              1034 drivers/gpu/drm/savage/savage_bci.c 			  task_pid_nr(current), d->request_count, dma->buf_count);
dma              1049 drivers/gpu/drm/savage/savage_bci.c 	struct drm_device_dma *dma = dev->dma;
dma              1054 drivers/gpu/drm/savage/savage_bci.c 	if (!dma)
dma              1058 drivers/gpu/drm/savage/savage_bci.c 	if (!dma->buflist)
dma              1066 drivers/gpu/drm/savage/savage_bci.c 	for (i = 0; i < dma->buf_count; i++) {
dma              1067 drivers/gpu/drm/savage/savage_bci.c 		struct drm_buf *buf = dma->buflist[i];
dma               970 drivers/gpu/drm/savage/savage_state.c 	struct drm_device_dma *dma = dev->dma;
dma               984 drivers/gpu/drm/savage/savage_state.c 	if (dma && dma->buflist) {
dma               985 drivers/gpu/drm/savage/savage_state.c 		if (cmdbuf->dma_idx >= dma->buf_count) {
dma               988 drivers/gpu/drm/savage/savage_state.c 			     cmdbuf->dma_idx, dma->buf_count - 1);
dma               991 drivers/gpu/drm/savage/savage_state.c 		dmabuf = dma->buflist[cmdbuf->dma_idx];
dma               212 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 	lcdc_write(sdev, LDSA1R, scrtc->dma[0]);
dma               214 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 		lcdc_write(sdev, LDSA2R, scrtc->dma[1]);
dma               295 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 	scrtc->dma[0] = gem->paddr + fb->offsets[0]
dma               301 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 		scrtc->dma[1] = gem->paddr + fb->offsets[1]
dma               314 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 	lcdc_write_mirror(sdev, LDSA1R, scrtc->dma[0]);
dma               316 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 		lcdc_write_mirror(sdev, LDSA2R, scrtc->dma[1]);
dma                29 drivers/gpu/drm/shmobile/shmob_drm_crtc.h 	unsigned long dma[2];
dma                27 drivers/gpu/drm/shmobile/shmob_drm_plane.c 	unsigned long dma[2];
dma                48 drivers/gpu/drm/shmobile/shmob_drm_plane.c 	splane->dma[0] = gem->paddr + fb->offsets[0]
dma                54 drivers/gpu/drm/shmobile/shmob_drm_plane.c 		splane->dma[1] = gem->paddr + fb->offsets[1]
dma               142 drivers/gpu/drm/shmobile/shmob_drm_plane.c 	lcdc_write(sdev, LDBnBSAYR(splane->index), splane->dma[0]);
dma               144 drivers/gpu/drm/shmobile/shmob_drm_plane.c 		lcdc_write(sdev, LDBnBSACR(splane->index), splane->dma[1]);
dma              1125 drivers/gpu/drm/tegra/drm.c void *tegra_drm_alloc(struct tegra_drm *tegra, size_t size, dma_addr_t *dma)
dma              1157 drivers/gpu/drm/tegra/drm.c 		*dma = virt_to_phys(virt);
dma              1169 drivers/gpu/drm/tegra/drm.c 	*dma = iova_dma_addr(&tegra->carveout.domain, alloc);
dma              1170 drivers/gpu/drm/tegra/drm.c 	err = iommu_map(tegra->domain, *dma, virt_to_phys(virt),
dma              1186 drivers/gpu/drm/tegra/drm.c 		    dma_addr_t dma)
dma              1194 drivers/gpu/drm/tegra/drm.c 		iommu_unmap(tegra->domain, dma, size);
dma              1196 drivers/gpu/drm/tegra/drm.c 			  iova_pfn(&tegra->carveout.domain, dma));
dma               126 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	dma_addr_t dma;
dma               289 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	dma_addr_t dma = d_page->dma;
dma               294 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	dma_free_attrs(pool->dev, pool->size, (void *)d_page->vaddr, dma, attrs);
dma               312 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	vaddr = dma_alloc_attrs(pool->dev, pool->size, &d_page->dma,
dma               848 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ttm_dma->dma_address[index] = d_page->dma;
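The ttm_page_alloc_dma entries above back every pooled page with a coherent allocation whose attributes (write-combining and friends) travel through dma_alloc_attrs()/dma_free_attrs(), stashing the returned bus address in d_page->dma. A minimal sketch of that pairing with a hypothetical d_page struct:

	#include <linux/dma-mapping.h>

	struct d_page {
		void		*vaddr;	/* CPU mapping of the page */
		dma_addr_t	dma;	/* bus address handed to the device */
	};

	static int d_page_alloc(struct device *dev, struct d_page *p,
				size_t size, unsigned long attrs)
	{
		/* attrs could be e.g. DMA_ATTR_WRITE_COMBINE, or 0. */
		p->vaddr = dma_alloc_attrs(dev, size, &p->dma, GFP_KERNEL, attrs);
		return p->vaddr ? 0 : -ENOMEM;
	}

	static void d_page_free(struct device *dev, struct d_page *p,
				size_t size, unsigned long attrs)
	{
		dma_free_attrs(dev, size, p->vaddr, p->dma, attrs);
	}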
dma               158 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		SVGA3dCmdSurfaceDMA dma;
dma               168 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (cmd->dma.host.face != 0 || cmd->dma.host.mipmap != 0) {
dma               182 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (cmd->dma.guest.ptr.offset % PAGE_SIZE ||
dma               194 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			  cmd->dma.guest.ptr.offset);
dma               198 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	kmap_offset = cmd->dma.guest.ptr.offset >> PAGE_SHIFT;
dma               213 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (box->w == 64 && cmd->dma.guest.pitch == 64*4) {
dma               219 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			       virtual + i * cmd->dma.guest.pitch,
dma                57 drivers/gpu/host1x/cdma.c 		iommu_unmap(host1x->domain, pb->dma, pb->alloc_size);
dma                58 drivers/gpu/host1x/cdma.c 		free_iova(&host1x->iova, iova_pfn(&host1x->iova, pb->dma));
dma               106 drivers/gpu/host1x/cdma.c 		pb->dma = iova_dma_addr(&host1x->iova, alloc);
dma               107 drivers/gpu/host1x/cdma.c 		err = iommu_map(host1x->domain, pb->dma, pb->phys, size,
dma               117 drivers/gpu/host1x/cdma.c 		pb->dma = pb->phys;
dma                35 drivers/gpu/host1x/cdma.h 	dma_addr_t dma;			/* device address of pushbuffer */
dma                52 drivers/gpu/host1x/hw/cdma_hw.c 	start = cdma->push_buffer.dma;
dma               103 drivers/gpu/host1x/hw/cdma_hw.c 	start = cdma->push_buffer.dma;
dma              1172 drivers/gpu/ipu-v3/ipu-common.c 			.dma[0] = IPUV3_CHANNEL_CSI0,
dma              1173 drivers/gpu/ipu-v3/ipu-common.c 			.dma[1] = -EINVAL,
dma              1179 drivers/gpu/ipu-v3/ipu-common.c 			.dma[0] = IPUV3_CHANNEL_CSI1,
dma              1180 drivers/gpu/ipu-v3/ipu-common.c 			.dma[1] = -EINVAL,
dma              1188 drivers/gpu/ipu-v3/ipu-common.c 			.dma[0] = IPUV3_CHANNEL_MEM_BG_SYNC,
dma              1189 drivers/gpu/ipu-v3/ipu-common.c 			.dma[1] = IPUV3_CHANNEL_MEM_FG_SYNC,
dma              1197 drivers/gpu/ipu-v3/ipu-common.c 			.dma[0] = IPUV3_CHANNEL_MEM_DC_SYNC,
dma              1198 drivers/gpu/ipu-v3/ipu-common.c 			.dma[1] = -EINVAL,
dma              1827 drivers/gpu/ipu-v3/ipu-image-convert.c 	const struct ipu_image_convert_dma_chan *dma = chan->dma_ch;
dma              1840 drivers/gpu/ipu-v3/ipu-image-convert.c 	chan->in_chan = ipu_idmac_get(priv->ipu, dma->in);
dma              1841 drivers/gpu/ipu-v3/ipu-image-convert.c 	chan->out_chan = ipu_idmac_get(priv->ipu, dma->out);
dma              1848 drivers/gpu/ipu-v3/ipu-image-convert.c 	chan->rotation_in_chan = ipu_idmac_get(priv->ipu, dma->rot_in);
dma              1849 drivers/gpu/ipu-v3/ipu-image-convert.c 	chan->rotation_out_chan = ipu_idmac_get(priv->ipu, dma->rot_out);
dma                96 drivers/i2c/busses/i2c-at91-master.c 	struct at91_twi_dma *dma = &dev->dma;
dma               100 drivers/i2c/busses/i2c-at91-master.c 	if (dma->xfer_in_progress) {
dma               101 drivers/i2c/busses/i2c-at91-master.c 		if (dma->direction == DMA_FROM_DEVICE)
dma               102 drivers/i2c/busses/i2c-at91-master.c 			dmaengine_terminate_all(dma->chan_rx);
dma               104 drivers/i2c/busses/i2c-at91-master.c 			dmaengine_terminate_all(dma->chan_tx);
dma               105 drivers/i2c/busses/i2c-at91-master.c 		dma->xfer_in_progress = false;
dma               107 drivers/i2c/busses/i2c-at91-master.c 	if (dma->buf_mapped) {
dma               108 drivers/i2c/busses/i2c-at91-master.c 		dma_unmap_single(dev->dev, sg_dma_address(&dma->sg[0]),
dma               109 drivers/i2c/busses/i2c-at91-master.c 				 dev->buf_len, dma->direction);
dma               110 drivers/i2c/busses/i2c-at91-master.c 		dma->buf_mapped = false;
dma               140 drivers/i2c/busses/i2c-at91-master.c 	dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]),
dma               159 drivers/i2c/busses/i2c-at91-master.c 	struct at91_twi_dma *dma = &dev->dma;
dma               160 drivers/i2c/busses/i2c-at91-master.c 	struct dma_chan *chan_tx = dma->chan_tx;
dma               166 drivers/i2c/busses/i2c-at91-master.c 	dma->direction = DMA_TO_DEVICE;
dma               175 drivers/i2c/busses/i2c-at91-master.c 	dma->buf_mapped = true;
dma               187 drivers/i2c/busses/i2c-at91-master.c 			sg = &dma->sg[sg_len++];
dma               194 drivers/i2c/busses/i2c-at91-master.c 			sg = &dma->sg[sg_len++];
dma               208 drivers/i2c/busses/i2c-at91-master.c 		sg_dma_len(&dma->sg[0]) = dev->buf_len;
dma               209 drivers/i2c/busses/i2c-at91-master.c 		sg_dma_address(&dma->sg[0]) = dma_addr;
dma               212 drivers/i2c/busses/i2c-at91-master.c 	txdesc = dmaengine_prep_slave_sg(chan_tx, dma->sg, sg_len,
dma               223 drivers/i2c/busses/i2c-at91-master.c 	dma->xfer_in_progress = true;
dma               282 drivers/i2c/busses/i2c-at91-master.c 	dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]),
dma               298 drivers/i2c/busses/i2c-at91-master.c 	struct at91_twi_dma *dma = &dev->dma;
dma               299 drivers/i2c/busses/i2c-at91-master.c 	struct dma_chan *chan_rx = dma->chan_rx;
dma               303 drivers/i2c/busses/i2c-at91-master.c 	dma->direction = DMA_FROM_DEVICE;
dma               312 drivers/i2c/busses/i2c-at91-master.c 	dma->buf_mapped = true;
dma               328 drivers/i2c/busses/i2c-at91-master.c 	sg_dma_len(&dma->sg[0]) = buf_len;
dma               329 drivers/i2c/busses/i2c-at91-master.c 	sg_dma_address(&dma->sg[0]) = dma_addr;
dma               331 drivers/i2c/busses/i2c-at91-master.c 	rxdesc = dmaengine_prep_slave_sg(chan_rx, dma->sg, 1, DMA_DEV_TO_MEM,
dma               341 drivers/i2c/busses/i2c-at91-master.c 	dma->xfer_in_progress = true;
dma               343 drivers/i2c/busses/i2c-at91-master.c 	dma_async_issue_pending(dma->chan_rx);
dma               693 drivers/i2c/busses/i2c-at91-master.c 	struct at91_twi_dma *dma = &dev->dma;
dma               723 drivers/i2c/busses/i2c-at91-master.c 	dma->chan_tx = dma_request_slave_channel_reason(dev->dev, "tx");
dma               724 drivers/i2c/busses/i2c-at91-master.c 	if (IS_ERR(dma->chan_tx)) {
dma               725 drivers/i2c/busses/i2c-at91-master.c 		ret = PTR_ERR(dma->chan_tx);
dma               726 drivers/i2c/busses/i2c-at91-master.c 		dma->chan_tx = NULL;
dma               730 drivers/i2c/busses/i2c-at91-master.c 	dma->chan_rx = dma_request_slave_channel_reason(dev->dev, "rx");
dma               731 drivers/i2c/busses/i2c-at91-master.c 	if (IS_ERR(dma->chan_rx)) {
dma               732 drivers/i2c/busses/i2c-at91-master.c 		ret = PTR_ERR(dma->chan_rx);
dma               733 drivers/i2c/busses/i2c-at91-master.c 		dma->chan_rx = NULL;
dma               738 drivers/i2c/busses/i2c-at91-master.c 	if (dmaengine_slave_config(dma->chan_tx, &slave_config)) {
dma               745 drivers/i2c/busses/i2c-at91-master.c 	if (dmaengine_slave_config(dma->chan_rx, &slave_config)) {
dma               751 drivers/i2c/busses/i2c-at91-master.c 	sg_init_table(dma->sg, 2);
dma               752 drivers/i2c/busses/i2c-at91-master.c 	dma->buf_mapped = false;
dma               753 drivers/i2c/busses/i2c-at91-master.c 	dma->xfer_in_progress = false;
dma               757 drivers/i2c/busses/i2c-at91-master.c 		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));
dma               764 drivers/i2c/busses/i2c-at91-master.c 	if (dma->chan_rx)
dma               765 drivers/i2c/busses/i2c-at91-master.c 		dma_release_channel(dma->chan_rx);
dma               766 drivers/i2c/busses/i2c-at91-master.c 	if (dma->chan_tx)
dma               767 drivers/i2c/busses/i2c-at91-master.c 		dma_release_channel(dma->chan_tx);
dma               142 drivers/i2c/busses/i2c-at91.h 	struct at91_twi_dma dma;
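The i2c-at91 entries above walk the standard dmaengine slave sequence: request the "tx"/"rx" channels, apply a dma_slave_config, map the message buffer, prepare a slave descriptor with a completion callback, then dmaengine_submit() and dma_async_issue_pending(). A self-contained sketch of one memory-to-device transfer along those lines; the FIFO address, bus width and callback are placeholders, not the at91 driver's values, and real drivers usually map against the DMA controller's own device:

	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>

	/* One memory-to-device transfer of 'len' bytes from 'buf'. */
	static int demo_dma_tx(struct device *dev, struct dma_chan *chan,
			       dma_addr_t fifo_addr, void *buf, size_t len,
			       dma_async_tx_callback done, void *done_arg)
	{
		struct dma_slave_config cfg = {
			.direction	= DMA_MEM_TO_DEV,
			.dst_addr	= fifo_addr,		/* peripheral FIFO */
			.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
			.dst_maxburst	= 1,
		};
		struct dma_async_tx_descriptor *desc;
		dma_addr_t dma_buf;
		dma_cookie_t cookie;
		int ret;

		ret = dmaengine_slave_config(chan, &cfg);
		if (ret)
			return ret;

		dma_buf = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma_buf))
			return -ENOMEM;

		desc = dmaengine_prep_slave_single(chan, dma_buf, len,
						   DMA_MEM_TO_DEV,
						   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc) {
			ret = -EINVAL;
			goto unmap;
		}

		desc->callback = done;		/* completion callback */
		desc->callback_param = done_arg;

		cookie = dmaengine_submit(desc);
		if (dma_submit_error(cookie)) {
			ret = -EIO;
			goto unmap;
		}

		dma_async_issue_pending(chan);	/* start the transfer */
		return 0;

	unmap:
		dma_unmap_single(dev, dma_buf, len, DMA_TO_DEVICE);
		return ret;
	}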
dma               205 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma	*dma;
dma               286 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma *dma;
dma               291 drivers/i2c/busses/i2c-imx.c 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
dma               292 drivers/i2c/busses/i2c-imx.c 	if (!dma)
dma               295 drivers/i2c/busses/i2c-imx.c 	dma->chan_tx = dma_request_chan(dev, "tx");
dma               296 drivers/i2c/busses/i2c-imx.c 	if (IS_ERR(dma->chan_tx)) {
dma               297 drivers/i2c/busses/i2c-imx.c 		ret = PTR_ERR(dma->chan_tx);
dma               308 drivers/i2c/busses/i2c-imx.c 	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
dma               314 drivers/i2c/busses/i2c-imx.c 	dma->chan_rx = dma_request_chan(dev, "rx");
dma               315 drivers/i2c/busses/i2c-imx.c 	if (IS_ERR(dma->chan_rx)) {
dma               316 drivers/i2c/busses/i2c-imx.c 		ret = PTR_ERR(dma->chan_rx);
dma               327 drivers/i2c/busses/i2c-imx.c 	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
dma               333 drivers/i2c/busses/i2c-imx.c 	i2c_imx->dma = dma;
dma               334 drivers/i2c/busses/i2c-imx.c 	init_completion(&dma->cmd_complete);
dma               336 drivers/i2c/busses/i2c-imx.c 		dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));
dma               341 drivers/i2c/busses/i2c-imx.c 	dma_release_channel(dma->chan_rx);
dma               343 drivers/i2c/busses/i2c-imx.c 	dma_release_channel(dma->chan_tx);
dma               345 drivers/i2c/busses/i2c-imx.c 	devm_kfree(dev, dma);
dma               351 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma *dma = i2c_imx->dma;
dma               353 drivers/i2c/busses/i2c-imx.c 	dma_unmap_single(dma->chan_using->device->dev, dma->dma_buf,
dma               354 drivers/i2c/busses/i2c-imx.c 			dma->dma_len, dma->dma_data_dir);
dma               355 drivers/i2c/busses/i2c-imx.c 	complete(&dma->cmd_complete);
dma               361 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma *dma = i2c_imx->dma;
dma               364 drivers/i2c/busses/i2c-imx.c 	struct device *chan_dev = dma->chan_using->device->dev;
dma               366 drivers/i2c/busses/i2c-imx.c 	dma->dma_buf = dma_map_single(chan_dev, msgs->buf,
dma               367 drivers/i2c/busses/i2c-imx.c 					dma->dma_len, dma->dma_data_dir);
dma               368 drivers/i2c/busses/i2c-imx.c 	if (dma_mapping_error(chan_dev, dma->dma_buf)) {
dma               373 drivers/i2c/busses/i2c-imx.c 	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
dma               374 drivers/i2c/busses/i2c-imx.c 					dma->dma_len, dma->dma_transfer_dir,
dma               381 drivers/i2c/busses/i2c-imx.c 	reinit_completion(&dma->cmd_complete);
dma               389 drivers/i2c/busses/i2c-imx.c 	dma_async_issue_pending(dma->chan_using);
dma               393 drivers/i2c/busses/i2c-imx.c 	dmaengine_terminate_all(dma->chan_using);
dma               395 drivers/i2c/busses/i2c-imx.c 	dma_unmap_single(chan_dev, dma->dma_buf,
dma               396 drivers/i2c/busses/i2c-imx.c 			dma->dma_len, dma->dma_data_dir);
dma               403 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma *dma = i2c_imx->dma;
dma               405 drivers/i2c/busses/i2c-imx.c 	dma->dma_buf = 0;
dma               406 drivers/i2c/busses/i2c-imx.c 	dma->dma_len = 0;
dma               408 drivers/i2c/busses/i2c-imx.c 	dma_release_channel(dma->chan_tx);
dma               409 drivers/i2c/busses/i2c-imx.c 	dma->chan_tx = NULL;
dma               411 drivers/i2c/busses/i2c-imx.c 	dma_release_channel(dma->chan_rx);
dma               412 drivers/i2c/busses/i2c-imx.c 	dma->chan_rx = NULL;
dma               414 drivers/i2c/busses/i2c-imx.c 	dma->chan_using = NULL;
dma               571 drivers/i2c/busses/i2c-imx.c 		if (i2c_imx->dma)
dma               617 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma *dma = i2c_imx->dma;
dma               620 drivers/i2c/busses/i2c-imx.c 	dma->chan_using = dma->chan_tx;
dma               621 drivers/i2c/busses/i2c-imx.c 	dma->dma_transfer_dir = DMA_MEM_TO_DEV;
dma               622 drivers/i2c/busses/i2c-imx.c 	dma->dma_data_dir = DMA_TO_DEVICE;
dma               623 drivers/i2c/busses/i2c-imx.c 	dma->dma_len = msgs->len - 1;
dma               638 drivers/i2c/busses/i2c-imx.c 				&i2c_imx->dma->cmd_complete,
dma               641 drivers/i2c/busses/i2c-imx.c 		dmaengine_terminate_all(dma->chan_using);
dma               679 drivers/i2c/busses/i2c-imx.c 	struct imx_i2c_dma *dma = i2c_imx->dma;
dma               683 drivers/i2c/busses/i2c-imx.c 	dma->chan_using = dma->chan_rx;
dma               684 drivers/i2c/busses/i2c-imx.c 	dma->dma_transfer_dir = DMA_DEV_TO_MEM;
dma               685 drivers/i2c/busses/i2c-imx.c 	dma->dma_data_dir = DMA_FROM_DEVICE;
dma               687 drivers/i2c/busses/i2c-imx.c 	dma->dma_len = msgs->len - 2;
dma               693 drivers/i2c/busses/i2c-imx.c 				&i2c_imx->dma->cmd_complete,
dma               696 drivers/i2c/busses/i2c-imx.c 		dmaengine_terminate_all(dma->chan_using);
dma               793 drivers/i2c/busses/i2c-imx.c 	int use_dma = i2c_imx->dma && msgs->len >= DMA_THRESHOLD && !block_data;
dma               957 drivers/i2c/busses/i2c-imx.c 			if (i2c_imx->dma && msgs[i].len >= DMA_THRESHOLD)
dma              1207 drivers/i2c/busses/i2c-imx.c 	if (i2c_imx->dma)
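The i2c-imx entries above synchronise each DMA transfer with a struct completion: the descriptor callback completes it, the caller blocks in wait_for_completion_timeout(), and a timeout is handled by dmaengine_terminate_all() before the buffer is unmapped. A small sketch of just that synchronisation pattern (types and names are illustrative):

	#include <linux/completion.h>
	#include <linux/dmaengine.h>

	struct demo_xfer {
		struct completion done;	/* init_completion() once, reinit before each transfer */
		struct dma_chan *chan;
	};

	/* dmaengine callback: runs once the descriptor has completed. */
	static void demo_xfer_callback(void *arg)
	{
		struct demo_xfer *x = arg;

		complete(&x->done);
	}

	/* Block up to one second for the callback; cancel the transfer on timeout. */
	static int demo_xfer_wait(struct demo_xfer *x)
	{
		if (!wait_for_completion_timeout(&x->done, HZ)) {
			dmaengine_terminate_all(x->chan);
			return -ETIMEDOUT;
		}
		return 0;
	}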
dma               172 drivers/i2c/busses/i2c-qcom-geni.c 	u32 dma = readl_relaxed(gi2c->se.base + SE_GENI_DMA_MODE_EN);
dma               175 drivers/i2c/busses/i2c-qcom-geni.c 	if (dma) {
dma               183 drivers/i2c/busses/i2c-qcom-geni.c 		dma, tx_st, rx_st, m_stat);
dma               211 drivers/i2c/busses/i2c-qcom-geni.c 	u32 dma;
dma               221 drivers/i2c/busses/i2c-qcom-geni.c 	dma = readl_relaxed(base + SE_GENI_DMA_MODE_EN);
dma               243 drivers/i2c/busses/i2c-qcom-geni.c 		if (!dma)
dma               245 drivers/i2c/busses/i2c-qcom-geni.c 	} else if (dma) {
dma               287 drivers/i2c/busses/i2c-qcom-geni.c 	if (dma && dm_tx_st)
dma               289 drivers/i2c/busses/i2c-qcom-geni.c 	if (dma && dm_rx_st)
dma               224 drivers/i2c/busses/i2c-qup.c 	struct	dma_chan *dma;
dma               618 drivers/i2c/busses/i2c-qup.c 	if (qup->btx.dma)
dma               619 drivers/i2c/busses/i2c-qup.c 		dma_release_channel(qup->btx.dma);
dma               620 drivers/i2c/busses/i2c-qup.c 	if (qup->brx.dma)
dma               621 drivers/i2c/busses/i2c-qup.c 		dma_release_channel(qup->brx.dma);
dma               622 drivers/i2c/busses/i2c-qup.c 	qup->btx.dma = NULL;
dma               623 drivers/i2c/busses/i2c-qup.c 	qup->brx.dma = NULL;
dma               630 drivers/i2c/busses/i2c-qup.c 	if (!qup->btx.dma) {
dma               631 drivers/i2c/busses/i2c-qup.c 		qup->btx.dma = dma_request_slave_channel_reason(qup->dev, "tx");
dma               632 drivers/i2c/busses/i2c-qup.c 		if (IS_ERR(qup->btx.dma)) {
dma               633 drivers/i2c/busses/i2c-qup.c 			err = PTR_ERR(qup->btx.dma);
dma               634 drivers/i2c/busses/i2c-qup.c 			qup->btx.dma = NULL;
dma               640 drivers/i2c/busses/i2c-qup.c 	if (!qup->brx.dma) {
dma               641 drivers/i2c/busses/i2c-qup.c 		qup->brx.dma = dma_request_slave_channel_reason(qup->dev, "rx");
dma               642 drivers/i2c/busses/i2c-qup.c 		if (IS_ERR(qup->brx.dma)) {
dma               644 drivers/i2c/busses/i2c-qup.c 			err = PTR_ERR(qup->brx.dma);
dma               645 drivers/i2c/busses/i2c-qup.c 			qup->brx.dma = NULL;
dma               755 drivers/i2c/busses/i2c-qup.c 	txd = dmaengine_prep_slave_sg(qup->btx.dma, qup->btx.sg, tx_cnt,
dma               775 drivers/i2c/busses/i2c-qup.c 	dma_async_issue_pending(qup->btx.dma);
dma               778 drivers/i2c/busses/i2c-qup.c 		rxd = dmaengine_prep_slave_sg(qup->brx.dma, qup->brx.sg,
dma               786 drivers/i2c/busses/i2c-qup.c 			dmaengine_terminate_all(qup->btx.dma);
dma               798 drivers/i2c/busses/i2c-qup.c 		dma_async_issue_pending(qup->brx.dma);
dma              1910 drivers/i2c/busses/i2c-qup.c 	if (qup->btx.dma)
dma              1911 drivers/i2c/busses/i2c-qup.c 		dma_release_channel(qup->btx.dma);
dma              1912 drivers/i2c/busses/i2c-qup.c 	if (qup->brx.dma)
dma              1913 drivers/i2c/busses/i2c-qup.c 		dma_release_channel(qup->brx.dma);
dma              1922 drivers/i2c/busses/i2c-qup.c 		dma_release_channel(qup->btx.dma);
dma              1923 drivers/i2c/busses/i2c-qup.c 		dma_release_channel(qup->brx.dma);
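The i2c-qup entries above gather several buffers into a scatterlist and submit the whole list with dmaengine_prep_slave_sg() before issuing the channel; like the at91 code earlier, they fill sg_dma_address()/sg_dma_len() directly for buffers that are already DMA-mapped. A compact sketch of that pattern under the same pre-mapped-buffer assumption:

	#include <linux/dmaengine.h>
	#include <linux/scatterlist.h>

	/* Queue 'n' pre-mapped buffers (addr[i], len[i]) as one slave transfer. */
	static int demo_prep_sg(struct dma_chan *chan, struct scatterlist *sg,
				dma_addr_t *addr, unsigned int *len, int n,
				enum dma_transfer_direction dir)
	{
		struct dma_async_tx_descriptor *desc;
		int i;

		sg_init_table(sg, n);
		for (i = 0; i < n; i++) {
			sg_dma_address(&sg[i]) = addr[i];
			sg_dma_len(&sg[i]) = len[i];
		}

		desc = dmaengine_prep_slave_sg(chan, sg, n, dir,
					       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc)
			return -EINVAL;

		dmaengine_submit(desc);
		dma_async_issue_pending(chan);
		return 0;
	}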
dma                17 drivers/i2c/busses/i2c-stm32.c 	struct stm32_i2c_dma *dma;
dma                21 drivers/i2c/busses/i2c-stm32.c 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
dma                22 drivers/i2c/busses/i2c-stm32.c 	if (!dma)
dma                26 drivers/i2c/busses/i2c-stm32.c 	dma->chan_tx = dma_request_chan(dev, "tx");
dma                27 drivers/i2c/busses/i2c-stm32.c 	if (IS_ERR(dma->chan_tx)) {
dma                29 drivers/i2c/busses/i2c-stm32.c 		ret = PTR_ERR(dma->chan_tx);
dma                38 drivers/i2c/busses/i2c-stm32.c 	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
dma                45 drivers/i2c/busses/i2c-stm32.c 	dma->chan_rx = dma_request_chan(dev, "rx");
dma                46 drivers/i2c/busses/i2c-stm32.c 	if (IS_ERR(dma->chan_rx)) {
dma                48 drivers/i2c/busses/i2c-stm32.c 		ret = PTR_ERR(dma->chan_rx);
dma                57 drivers/i2c/busses/i2c-stm32.c 	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
dma                63 drivers/i2c/busses/i2c-stm32.c 	init_completion(&dma->dma_complete);
dma                66 drivers/i2c/busses/i2c-stm32.c 		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));
dma                68 drivers/i2c/busses/i2c-stm32.c 	return dma;
dma                71 drivers/i2c/busses/i2c-stm32.c 	dma_release_channel(dma->chan_rx);
dma                73 drivers/i2c/busses/i2c-stm32.c 	dma_release_channel(dma->chan_tx);
dma                75 drivers/i2c/busses/i2c-stm32.c 	devm_kfree(dev, dma);
dma                81 drivers/i2c/busses/i2c-stm32.c void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
dma                83 drivers/i2c/busses/i2c-stm32.c 	dma->dma_buf = 0;
dma                84 drivers/i2c/busses/i2c-stm32.c 	dma->dma_len = 0;
dma                86 drivers/i2c/busses/i2c-stm32.c 	dma_release_channel(dma->chan_tx);
dma                87 drivers/i2c/busses/i2c-stm32.c 	dma->chan_tx = NULL;
dma                89 drivers/i2c/busses/i2c-stm32.c 	dma_release_channel(dma->chan_rx);
dma                90 drivers/i2c/busses/i2c-stm32.c 	dma->chan_rx = NULL;
dma                92 drivers/i2c/busses/i2c-stm32.c 	dma->chan_using = NULL;
dma                95 drivers/i2c/busses/i2c-stm32.c int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
dma               105 drivers/i2c/busses/i2c-stm32.c 		dma->chan_using = dma->chan_rx;
dma               106 drivers/i2c/busses/i2c-stm32.c 		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
dma               107 drivers/i2c/busses/i2c-stm32.c 		dma->dma_data_dir = DMA_FROM_DEVICE;
dma               109 drivers/i2c/busses/i2c-stm32.c 		dma->chan_using = dma->chan_tx;
dma               110 drivers/i2c/busses/i2c-stm32.c 		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
dma               111 drivers/i2c/busses/i2c-stm32.c 		dma->dma_data_dir = DMA_TO_DEVICE;
dma               114 drivers/i2c/busses/i2c-stm32.c 	dma->dma_len = len;
dma               115 drivers/i2c/busses/i2c-stm32.c 	chan_dev = dma->chan_using->device->dev;
dma               117 drivers/i2c/busses/i2c-stm32.c 	dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
dma               118 drivers/i2c/busses/i2c-stm32.c 				      dma->dma_data_dir);
dma               119 drivers/i2c/busses/i2c-stm32.c 	if (dma_mapping_error(chan_dev, dma->dma_buf)) {
dma               124 drivers/i2c/busses/i2c-stm32.c 	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
dma               125 drivers/i2c/busses/i2c-stm32.c 					     dma->dma_len,
dma               126 drivers/i2c/busses/i2c-stm32.c 					     dma->dma_transfer_dir,
dma               134 drivers/i2c/busses/i2c-stm32.c 	reinit_completion(&dma->dma_complete);
dma               144 drivers/i2c/busses/i2c-stm32.c 	dma_async_issue_pending(dma->chan_using);
dma               149 drivers/i2c/busses/i2c-stm32.c 	dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
dma               150 drivers/i2c/busses/i2c-stm32.c 			 dma->dma_data_dir);
dma                51 drivers/i2c/busses/i2c-stm32.h void stm32_i2c_dma_free(struct stm32_i2c_dma *dma);
dma                53 drivers/i2c/busses/i2c-stm32.h int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
dma               303 drivers/i2c/busses/i2c-stm32f7.c 	struct stm32_i2c_dma *dma;
dma               632 drivers/i2c/busses/i2c-stm32f7.c 	struct stm32_i2c_dma *dma = i2c_dev->dma;
dma               633 drivers/i2c/busses/i2c-stm32f7.c 	struct device *dev = dma->chan_using->device->dev;
dma               636 drivers/i2c/busses/i2c-stm32f7.c 	dma_unmap_single(dev, dma->dma_buf, dma->dma_len, dma->dma_data_dir);
dma               637 drivers/i2c/busses/i2c-stm32f7.c 	complete(&dma->dma_complete);
dma               824 drivers/i2c/busses/i2c-stm32f7.c 	if (i2c_dev->dma && f7_msg->count >= STM32F7_I2C_DMA_LEN_MIN) {
dma               825 drivers/i2c/busses/i2c-stm32f7.c 		ret = stm32_i2c_prep_dma_xfer(i2c_dev->dev, i2c_dev->dma,
dma               990 drivers/i2c/busses/i2c-stm32f7.c 	if (i2c_dev->dma && f7_msg->count >= STM32F7_I2C_DMA_LEN_MIN) {
dma               991 drivers/i2c/busses/i2c-stm32f7.c 		ret = stm32_i2c_prep_dma_xfer(i2c_dev->dev, i2c_dev->dma,
dma              1080 drivers/i2c/busses/i2c-stm32f7.c 	if (i2c_dev->dma && f7_msg->count >= STM32F7_I2C_DMA_LEN_MIN &&
dma              1083 drivers/i2c/busses/i2c-stm32f7.c 		ret = stm32_i2c_prep_dma_xfer(i2c_dev->dev, i2c_dev->dma,
dma              1465 drivers/i2c/busses/i2c-stm32f7.c 	struct stm32_i2c_dma *dma = i2c_dev->dma;
dma              1473 drivers/i2c/busses/i2c-stm32f7.c 	ret = wait_for_completion_timeout(&i2c_dev->dma->dma_complete, HZ);
dma              1477 drivers/i2c/busses/i2c-stm32f7.c 		dmaengine_terminate_all(dma->chan_using);
dma              1505 drivers/i2c/busses/i2c-stm32f7.c 	struct stm32_i2c_dma *dma = i2c_dev->dma;
dma              1544 drivers/i2c/busses/i2c-stm32f7.c 		dmaengine_terminate_all(dma->chan_using);
dma              1558 drivers/i2c/busses/i2c-stm32f7.c 	struct stm32_i2c_dma *dma = i2c_dev->dma;
dma              1585 drivers/i2c/busses/i2c-stm32f7.c 			dmaengine_terminate_all(dma->chan_using);
dma              1603 drivers/i2c/busses/i2c-stm32f7.c 	struct stm32_i2c_dma *dma = i2c_dev->dma;
dma              1634 drivers/i2c/busses/i2c-stm32f7.c 			dmaengine_terminate_all(dma->chan_using);
dma              1955 drivers/i2c/busses/i2c-stm32f7.c 	i2c_dev->dma = stm32_i2c_dma_request(i2c_dev->dev, phy_addr,
dma              1958 drivers/i2c/busses/i2c-stm32f7.c 	if (PTR_ERR(i2c_dev->dma) == -ENODEV)
dma              1959 drivers/i2c/busses/i2c-stm32f7.c 		i2c_dev->dma = NULL;
dma              1960 drivers/i2c/busses/i2c-stm32f7.c 	else if (IS_ERR(i2c_dev->dma)) {
dma              1961 drivers/i2c/busses/i2c-stm32f7.c 		ret = PTR_ERR(i2c_dev->dma);
dma              1997 drivers/i2c/busses/i2c-stm32f7.c 	if (i2c_dev->dma) {
dma              1998 drivers/i2c/busses/i2c-stm32f7.c 		stm32_i2c_dma_free(i2c_dev->dma);
dma              1999 drivers/i2c/busses/i2c-stm32f7.c 		i2c_dev->dma = NULL;
dma              2020 drivers/i2c/busses/i2c-stm32f7.c 	if (i2c_dev->dma) {
dma              2021 drivers/i2c/busses/i2c-stm32f7.c 		stm32_i2c_dma_free(i2c_dev->dma);
dma              2022 drivers/i2c/busses/i2c-stm32f7.c 		i2c_dev->dma = NULL;
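In the i2c-stm32f7.c probe/remove entries above, DMA is strictly optional: when the request helper returns ERR_PTR(-ENODEV) the driver simply clears i2c_dev->dma and runs in interrupt mode, while any other error (including probe deferral) is propagated. A hedged sketch of that fallback using the generic dma_request_chan(); the channel name "tx" is an assumption.

#include <linux/dmaengine.h>
#include <linux/err.h>

/* Returns a channel, NULL for "no DMA described" (fall back to PIO/IRQ),
 * or an ERR_PTR that the caller must propagate. */
static struct dma_chan *example_optional_dma(struct device *dev)
{
	struct dma_chan *chan = dma_request_chan(dev, "tx");

	if (IS_ERR(chan) && PTR_ERR(chan) == -ENODEV)
		return NULL;	/* not an error: run without DMA */

	return chan;		/* valid channel or real error (e.g. -EPROBE_DEFER) */
}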
dma              1051 drivers/i2c/busses/i2c-tegra.c 	bool dma;
dma              1071 drivers/i2c/busses/i2c-tegra.c 	dma = i2c_dev->is_curr_dma_xfer;
dma              1082 drivers/i2c/busses/i2c-tegra.c 	if (dma) {
dma              1109 drivers/i2c/busses/i2c-tegra.c 	if (dma && !i2c_dev->msg_read)
dma              1115 drivers/i2c/busses/i2c-tegra.c 	if (dma && !i2c_dev->msg_read)
dma              1135 drivers/i2c/busses/i2c-tegra.c 	if (dma && !i2c_dev->msg_read)
dma              1141 drivers/i2c/busses/i2c-tegra.c 		if (dma) {
dma              1161 drivers/i2c/busses/i2c-tegra.c 	if (!dma) {
dma              1175 drivers/i2c/busses/i2c-tegra.c 	if (dma) {
dma               294 drivers/ide/icside.c 	disable_dma(ec->dma);
dma               296 drivers/ide/icside.c 	return get_dma_residue(ec->dma) != 0;
dma               305 drivers/ide/icside.c 	BUG_ON(dma_channel_active(ec->dma));
dma               306 drivers/ide/icside.c 	enable_dma(ec->dma);
dma               324 drivers/ide/icside.c 	BUG_ON(dma_channel_active(ec->dma));
dma               339 drivers/ide/icside.c 	set_dma_speed(ec->dma, (unsigned long)ide_get_drivedata(drive));
dma               345 drivers/ide/icside.c 	set_dma_sg(ec->dma, hwif->sg_table, cmd->sg_nents);
dma               346 drivers/ide/icside.c 	set_dma_mode(ec->dma, dma_mode);
dma               520 drivers/ide/icside.c 	if (ec->dma != NO_DMA && !request_dma(ec->dma, DRV_NAME)) {
dma               535 drivers/ide/icside.c 		free_dma(ec->dma);
dma               621 drivers/ide/icside.c 		if (ec->dma != NO_DMA)
dma               622 drivers/ide/icside.c 			free_dma(ec->dma);
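icside drives its expansion-card channel through the legacy ARM request_dma()/enable_dma() interface quoted from arch/arm/kernel/dma.c at the top of this listing. A rough sketch of one transfer, under the assumption that the channel number and scatterlist come from the card; completion waiting and error handling are simplified.

#include <asm/dma.h>
#include <linux/scatterlist.h>

static int example_legacy_dma_xfer(unsigned int chan, struct scatterlist *sg,
				   int nents, unsigned int mode)
{
	int residue;

	if (request_dma(chan, "example"))
		return -EBUSY;

	set_dma_sg(chan, sg, nents);	/* ARM-specific scatter/gather setup */
	set_dma_mode(chan, mode);	/* e.g. DMA_MODE_READ or DMA_MODE_WRITE */
	enable_dma(chan);		/* channel must not already be active */

	/* ... wait for the device to signal completion ... */

	disable_dma(chan);
	residue = get_dma_residue(chan);
	free_dma(chan);
	return residue ? -EIO : 0;
}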
dma               559 drivers/ide/ide-atapi.c 				u16 bcount, u8 dma)
dma               561 drivers/ide/ide-atapi.c 	cmd->protocol = dma ? ATAPI_PROT_DMA : ATAPI_PROT_PIO;
dma               565 drivers/ide/ide-atapi.c 	cmd->tf.feature = dma;		/* Use PIO/DMA */
dma               628 drivers/ide/ide-atapi.c 		if (drive->dma)
dma               685 drivers/ide/ide-atapi.c 		if (drive->dma)
dma               714 drivers/ide/ide-atapi.c 		if (drive->dma)
dma               715 drivers/ide/ide-atapi.c 			drive->dma = !ide_dma_prepare(drive, cmd);
dma               734 drivers/ide/ide-atapi.c 			drive->dma = !ide_dma_prepare(drive, cmd);
dma               736 drivers/ide/ide-atapi.c 		if (!drive->dma)
dma               743 drivers/ide/ide-atapi.c 	ide_init_packet_cmd(cmd, valid_tf, bcount, drive->dma);
dma               748 drivers/ide/ide-atapi.c 		if (drive->dma)
dma               612 drivers/ide/ide-cd.c 	int dma_error = 0, dma, thislen, uptodate = 0;
dma               622 drivers/ide/ide-cd.c 	dma = drive->dma;
dma               623 drivers/ide/ide-cd.c 	if (dma) {
dma               624 drivers/ide/ide-cd.c 		drive->dma = 0;
dma               648 drivers/ide/ide-cd.c 	if (dma) {
dma               830 drivers/ide/ide-cd.c 	drive->dma = !!(drive->dev_flags & IDE_DFLAG_USING_DMA);
dma               851 drivers/ide/ide-cd.c 	drive->dma = 0;
dma               859 drivers/ide/ide-cd.c 		drive->dma = !!(drive->dev_flags & IDE_DFLAG_USING_DMA);
dma               871 drivers/ide/ide-cd.c 			drive->dma = 0;
dma                56 drivers/ide/ide-disk.c static void ide_tf_set_cmd(ide_drive_t *drive, struct ide_cmd *cmd, u8 dma)
dma                63 drivers/ide/ide-disk.c 	if (dma) {
dma                88 drivers/ide/ide-disk.c 	u8 dma			= !!(drive->dev_flags & IDE_DFLAG_USING_DMA);
dma                93 drivers/ide/ide-disk.c 	if ((hwif->host_flags & IDE_HFLAG_NO_LBA48_DMA) && lba48 && dma) {
dma                95 drivers/ide/ide-disk.c 			dma = 0;
dma               155 drivers/ide/ide-disk.c 	ide_tf_set_cmd(drive, &cmd, dma);
dma               158 drivers/ide/ide-disk.c 	if (dma == 0) {
dma               165 drivers/ide/ide-disk.c 	if (rc == ide_stopped && dma) {
dma               288 drivers/ide/it821x.c 	static u16 dma[]	= { 0x8866, 0x3222, 0x3121 };
dma               293 drivers/ide/it821x.c 	itdev->mwdma[unit] = dma[mode_wanted];
dma              1458 drivers/ide/pmac.c 	volatile struct dbdma_regs __iomem *dma = pmif->dma_regs;
dma              1467 drivers/ide/pmac.c 	writel((RUN|PAUSE|FLUSH|WAKE|DEAD) << 16, &dma->control);
dma              1468 drivers/ide/pmac.c 	while (readl(&dma->status) & RUN)
dma              1517 drivers/ide/pmac.c 		writel(hwif->dmatable_dma, &dma->cmdptr);
dma              1559 drivers/ide/pmac.c 	volatile struct dbdma_regs __iomem *dma;
dma              1561 drivers/ide/pmac.c 	dma = pmif->dma_regs;
dma              1563 drivers/ide/pmac.c 	writel((RUN << 16) | RUN, &dma->control);
dma              1565 drivers/ide/pmac.c 	(void)readl(&dma->control);
dma              1576 drivers/ide/pmac.c 	volatile struct dbdma_regs __iomem *dma = pmif->dma_regs;
dma              1579 drivers/ide/pmac.c 	dstat = readl(&dma->status);
dma              1580 drivers/ide/pmac.c 	writel(((RUN|WAKE|DEAD) << 16), &dma->control);
dma              1600 drivers/ide/pmac.c 	volatile struct dbdma_regs __iomem *dma = pmif->dma_regs;
dma              1618 drivers/ide/pmac.c 	status = readl(&dma->status);
dma              1629 drivers/ide/pmac.c 	writel((FLUSH << 16) | FLUSH, &dma->control);
dma              1633 drivers/ide/pmac.c 		status = readl(&dma->status);
dma              1654 drivers/ide/pmac.c 	volatile struct dbdma_regs __iomem *dma = pmif->dma_regs;
dma              1655 drivers/ide/pmac.c 	unsigned long status = readl(&dma->status);
dma               302 drivers/ide/siimage.c 	static const u16 dma[]	 = { 0x2208, 0x10C2, 0x10C1 };
dma               327 drivers/ide/siimage.c 		multi  = dma[2];
dma               332 drivers/ide/siimage.c 		multi = dma[speed - XFER_MW_DMA_0];
dma               162 drivers/iio/adc/stm32-adc.c 	void (*start_conv)(struct stm32_adc *, bool dma);
dma               570 drivers/iio/adc/stm32-adc.c static void stm32f4_adc_start_conv(struct stm32_adc *adc, bool dma)
dma               574 drivers/iio/adc/stm32-adc.c 	if (dma)
dma               598 drivers/iio/adc/stm32-adc.c static void stm32h7_adc_start_conv(struct stm32_adc *adc, bool dma)
dma               604 drivers/iio/adc/stm32-adc.c 	if (dma)
dma                51 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma dma;
dma               237 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma *dma = &adc_dev->dma;
dma               241 drivers/iio/adc/ti_am335x_adc.c 	data = dma->buf + dma->current_period * dma->period_size;
dma               242 drivers/iio/adc/ti_am335x_adc.c 	dma->current_period = 1 - dma->current_period; /* swap the buffer ID */
dma               244 drivers/iio/adc/ti_am335x_adc.c 	for (i = 0; i < dma->period_size; i += indio_dev->scan_bytes) {
dma               253 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma *dma = &adc_dev->dma;
dma               256 drivers/iio/adc/ti_am335x_adc.c 	dma->current_period = 0; /* We start to fill period 0 */
dma               264 drivers/iio/adc/ti_am335x_adc.c 	dma->fifo_thresh = rounddown(FIFO1_THRESHOLD + 1,
dma               267 drivers/iio/adc/ti_am335x_adc.c 	dma->period_size = rounddown(DMA_BUFFER_SIZE / 2,
dma               268 drivers/iio/adc/ti_am335x_adc.c 				    (dma->fifo_thresh + 1) * sizeof(u16));
dma               270 drivers/iio/adc/ti_am335x_adc.c 	dma->conf.src_maxburst = dma->fifo_thresh + 1;
dma               271 drivers/iio/adc/ti_am335x_adc.c 	dmaengine_slave_config(dma->chan, &dma->conf);
dma               273 drivers/iio/adc/ti_am335x_adc.c 	desc = dmaengine_prep_dma_cyclic(dma->chan, dma->addr,
dma               274 drivers/iio/adc/ti_am335x_adc.c 					 dma->period_size * 2,
dma               275 drivers/iio/adc/ti_am335x_adc.c 					 dma->period_size, DMA_DEV_TO_MEM,
dma               283 drivers/iio/adc/ti_am335x_adc.c 	dma->cookie = dmaengine_submit(desc);
dma               285 drivers/iio/adc/ti_am335x_adc.c 	dma_async_issue_pending(dma->chan);
dma               287 drivers/iio/adc/ti_am335x_adc.c 	tiadc_writel(adc_dev, REG_FIFO1THR, dma->fifo_thresh);
dma               288 drivers/iio/adc/ti_am335x_adc.c 	tiadc_writel(adc_dev, REG_DMA1REQ, dma->fifo_thresh);
dma               314 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma *dma = &adc_dev->dma;
dma               326 drivers/iio/adc/ti_am335x_adc.c 	if (dma->chan)
dma               335 drivers/iio/adc/ti_am335x_adc.c 	if (!dma->chan)
dma               345 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma *dma = &adc_dev->dma;
dma               353 drivers/iio/adc/ti_am335x_adc.c 	if (dma->chan) {
dma               355 drivers/iio/adc/ti_am335x_adc.c 		dmaengine_terminate_async(dma->chan);
dma               544 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma	*dma = &adc_dev->dma;
dma               548 drivers/iio/adc/ti_am335x_adc.c 	dma->conf.direction = DMA_DEV_TO_MEM;
dma               549 drivers/iio/adc/ti_am335x_adc.c 	dma->conf.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
dma               550 drivers/iio/adc/ti_am335x_adc.c 	dma->conf.src_addr = adc_dev->mfd_tscadc->tscadc_phys_base + REG_FIFO1;
dma               556 drivers/iio/adc/ti_am335x_adc.c 	dma->chan = dma_request_chan(adc_dev->mfd_tscadc->dev, "fifo1");
dma               557 drivers/iio/adc/ti_am335x_adc.c 	if (IS_ERR(dma->chan)) {
dma               558 drivers/iio/adc/ti_am335x_adc.c 		int ret = PTR_ERR(dma->chan);
dma               560 drivers/iio/adc/ti_am335x_adc.c 		dma->chan = NULL;
dma               565 drivers/iio/adc/ti_am335x_adc.c 	dma->buf = dma_alloc_coherent(dma->chan->device->dev, DMA_BUFFER_SIZE,
dma               566 drivers/iio/adc/ti_am335x_adc.c 				      &dma->addr, GFP_KERNEL);
dma               567 drivers/iio/adc/ti_am335x_adc.c 	if (!dma->buf)
dma               572 drivers/iio/adc/ti_am335x_adc.c 	dma_release_channel(dma->chan);
dma               677 drivers/iio/adc/ti_am335x_adc.c 	struct tiadc_dma *dma = &adc_dev->dma;
dma               680 drivers/iio/adc/ti_am335x_adc.c 	if (dma->chan) {
dma               681 drivers/iio/adc/ti_am335x_adc.c 		dma_free_coherent(dma->chan->device->dev, DMA_BUFFER_SIZE,
dma               682 drivers/iio/adc/ti_am335x_adc.c 				  dma->buf, dma->addr);
dma               683 drivers/iio/adc/ti_am335x_adc.c 		dma_release_channel(dma->chan);
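The ti_am335x_adc entries show the usual cyclic-capture setup: request a channel, carve a two-period ring out of dma_alloc_coherent() memory, configure the slave side for the FIFO, prep a cyclic descriptor and issue it, letting the per-period callback drain whichever half just filled. A condensed sketch under assumed buffer and burst sizes:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>

#define EX_BUF_SIZE	8192	/* two periods of EX_BUF_SIZE / 2 each */

static int example_start_cyclic(struct dma_chan *chan, dma_addr_t fifo_phys,
				dma_async_tx_callback cb, void *arg)
{
	struct dma_slave_config conf = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_phys,
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_2_BYTES,
		.src_maxburst	= 16,
	};
	struct dma_async_tx_descriptor *desc;
	dma_addr_t addr;
	void *buf;

	buf = dma_alloc_coherent(chan->device->dev, EX_BUF_SIZE, &addr,
				 GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	dmaengine_slave_config(chan, &conf);
	desc = dmaengine_prep_dma_cyclic(chan, addr, EX_BUF_SIZE,
					 EX_BUF_SIZE / 2, DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT);
	if (!desc) {
		dma_free_coherent(chan->device->dev, EX_BUF_SIZE, buf, addr);
		return -EBUSY;
	}

	desc->callback = cb;		/* invoked once per completed period */
	desc->callback_param = arg;
	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}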
dma               737 drivers/infiniband/core/umem_odp.c 			dma_addr_t dma = umem_odp->dma_list[idx];
dma               738 drivers/infiniband/core/umem_odp.c 			dma_addr_t dma_addr = dma & ODP_DMA_ADDR_MASK;
dma               745 drivers/infiniband/core/umem_odp.c 			if (dma & ODP_WRITE_ALLOWED_BIT) {
dma               120 drivers/infiniband/hw/bnxt_re/hw_counters.c 	struct ctx_hw_stats *bnxt_re_stats = rdev->qplib_ctx.stats.dma;
dma               764 drivers/infiniband/hw/bnxt_re/qplib_res.c 	if (stats->dma) {
dma               766 drivers/infiniband/hw/bnxt_re/qplib_res.c 				  stats->dma, stats->dma_map);
dma               782 drivers/infiniband/hw/bnxt_re/qplib_res.c 	stats->dma = dma_alloc_coherent(&pdev->dev, stats->size,
dma               784 drivers/infiniband/hw/bnxt_re/qplib_res.c 	if (!stats->dma) {
dma               144 drivers/infiniband/hw/bnxt_re/qplib_res.h 	void				*dma;
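In the bnxt_re entries the statistics block is a single coherent allocation: stats->dma holds the kernel virtual address (despite the name) and stats->dma_map the bus address handed to the firmware. A minimal sketch of that allocate/free pairing; the structure here is illustrative, not the driver's.

#include <linux/dma-mapping.h>

struct example_stats {
	void		*va;	/* CPU view of the buffer */
	dma_addr_t	map;	/* bus address programmed into the HW */
	size_t		size;
};

static int example_stats_alloc(struct device *dev, struct example_stats *s,
			       size_t size)
{
	s->size = size;
	s->va = dma_alloc_coherent(dev, size, &s->map, GFP_KERNEL);
	return s->va ? 0 : -ENOMEM;
}

static void example_stats_free(struct device *dev, struct example_stats *s)
{
	if (s->va)
		dma_free_coherent(dev, s->size, s->va, s->map);
	s->va = NULL;
}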
dma              1293 drivers/infiniband/hw/hfi1/file_ops.c 					       uctxt->egrbufs.rcvtids[0].dma);
dma               180 drivers/infiniband/hw/hfi1/hfi.h 		dma_addr_t dma;
dma               185 drivers/infiniband/hw/hfi1/hfi.h 		dma_addr_t dma;
dma              1165 drivers/infiniband/hw/hfi1/init.c 		if (rcd->egrbufs.buffers[e].dma)
dma              1169 drivers/infiniband/hw/hfi1/init.c 					  rcd->egrbufs.buffers[e].dma);
dma              1970 drivers/infiniband/hw/hfi1/init.c 					   &rcd->egrbufs.buffers[idx].dma,
dma              1977 drivers/infiniband/hw/hfi1/init.c 			rcd->egrbufs.rcvtids[rcd->egrbufs.alloced].dma =
dma              1978 drivers/infiniband/hw/hfi1/init.c 				rcd->egrbufs.buffers[idx].dma;
dma              2020 drivers/infiniband/hw/hfi1/init.c 				rcd->egrbufs.rcvtids[i].dma =
dma              2021 drivers/infiniband/hw/hfi1/init.c 					rcd->egrbufs.buffers[j].dma + offset;
dma              2025 drivers/infiniband/hw/hfi1/init.c 				if ((rcd->egrbufs.buffers[j].dma + offset +
dma              2027 drivers/infiniband/hw/hfi1/init.c 				    (rcd->egrbufs.buffers[j].dma +
dma              2079 drivers/infiniband/hw/hfi1/init.c 			     rcd->egrbufs.rcvtids[idx].dma, order);
dma              2092 drivers/infiniband/hw/hfi1/init.c 				  rcd->egrbufs.buffers[idx].dma);
dma              2094 drivers/infiniband/hw/hfi1/init.c 		rcd->egrbufs.buffers[idx].dma = 0;
dma               561 drivers/infiniband/hw/hfi1/pio.c static void cr_group_addresses(struct send_context *sc, dma_addr_t *dma)
dma               567 drivers/infiniband/hw/hfi1/pio.c 	*dma = (unsigned long)
dma               568 drivers/infiniband/hw/hfi1/pio.c 	       &((struct credit_return *)sc->dd->cr_base[sc->node].dma)[gc];
dma               701 drivers/infiniband/hw/hfi1/pio.c 	dma_addr_t dma;
dma               755 drivers/infiniband/hw/hfi1/pio.c 	cr_group_addresses(sc, &dma);
dma               798 drivers/infiniband/hw/hfi1/pio.c 	reg = dma & SC(CREDIT_RETURN_ADDR_ADDRESS_SMASK);
dma              2124 drivers/infiniband/hw/hfi1/pio.c 						       &dd->cr_base[i].dma,
dma              2154 drivers/infiniband/hw/hfi1/pio.c 					  dd->cr_base[i].dma);
dma               162 drivers/infiniband/hw/hfi1/pio.h 	dma_addr_t dma;
dma                86 drivers/infiniband/hw/hfi1/trace_ctxts.h 			   __entry->rcvegr_dma = uctxt->egrbufs.rcvtids[0].dma;
dma                89 drivers/infiniband/hw/hfi1/trace_tid.h 		 unsigned long va, unsigned long pa, dma_addr_t dma),
dma                90 drivers/infiniband/hw/hfi1/trace_tid.h 	TP_ARGS(ctxt, subctxt, rarr, npages, va, pa, dma),
dma                98 drivers/infiniband/hw/hfi1/trace_tid.h 		__field(dma_addr_t, dma)
dma               107 drivers/infiniband/hw/hfi1/trace_tid.h 		__entry->dma = dma;
dma               116 drivers/infiniband/hw/hfi1/trace_tid.h 		  __entry->dma
dma               123 drivers/infiniband/hw/hfi1/trace_tid.h 		 unsigned long va, unsigned long pa, dma_addr_t dma),
dma               124 drivers/infiniband/hw/hfi1/trace_tid.h 	TP_ARGS(ctxt, subctxt, rarr, npages, va, pa, dma)
dma               130 drivers/infiniband/hw/hfi1/trace_tid.h 		 unsigned long va, unsigned long pa, dma_addr_t dma),
dma               131 drivers/infiniband/hw/hfi1/trace_tid.h 	TP_ARGS(ctxt, subctxt, rarr, npages, va, pa, dma)
dma               165 drivers/infiniband/hw/hfi1/trace_tid.h 		 u32 npages, dma_addr_t dma),
dma               166 drivers/infiniband/hw/hfi1/trace_tid.h 	TP_ARGS(ctxt, subctxt, va, rarr, npages, dma),
dma               173 drivers/infiniband/hw/hfi1/trace_tid.h 		__field(dma_addr_t, dma)
dma               181 drivers/infiniband/hw/hfi1/trace_tid.h 		__entry->dma = dma;
dma               189 drivers/infiniband/hw/hfi1/trace_tid.h 		  __entry->dma
dma               275 drivers/infiniband/hw/hns/hns_roce_cmd.c 				      &mailbox->dma);
dma               290 drivers/infiniband/hw/hns/hns_roce_cmd.c 	dma_pool_free(hr_dev->cmd.pool, mailbox->buf, mailbox->dma);
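hns_roce carves its command mailboxes out of a struct dma_pool, which returns a small coherent block and its bus address (mailbox->dma) in one call; the same handle is later passed to hns_roce_cmd_mbox() and to dma_pool_free(). A minimal sketch of the pool round trip with illustrative size and alignment values:

#include <linux/dmapool.h>

#define EX_MAILBOX_SIZE	0x1000

static void example_mailbox_roundtrip(struct device *dev)
{
	struct dma_pool *pool;
	dma_addr_t dma;
	void *buf;

	pool = dma_pool_create("ex-mbox", dev, EX_MAILBOX_SIZE,
			       EX_MAILBOX_SIZE, 0);
	if (!pool)
		return;

	buf = dma_pool_alloc(pool, GFP_KERNEL, &dma);
	if (buf) {
		/* ... fill the mailbox via 'buf', post 'dma' to the device ... */
		dma_pool_free(pool, buf, dma);
	}
	dma_pool_destroy(pool);
}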
dma                80 drivers/infiniband/hw/hns/hns_roce_cq.c 	return hns_roce_cmd_mbox(dev, mailbox->dma, 0, cq_num, 0,
dma               177 drivers/infiniband/hw/hns/hns_roce_cq.c 	return hns_roce_cmd_mbox(dev, 0, mailbox ? mailbox->dma : 0, cq_num,
dma                45 drivers/infiniband/hw/hns/hns_roce_db.c 	db->dma = sg_dma_address(page->umem->sg_head.sgl) + offset;
dma               119 drivers/infiniband/hw/hns/hns_roce_db.c 	db->dma		= pgdir->db_dma  + db->index * HNS_ROCE_DB_UNIT_SIZE;
dma               479 drivers/infiniband/hw/hns/hns_roce_device.h 	dma_addr_t	dma;
dma               631 drivers/infiniband/hw/hns/hns_roce_device.h 	dma_addr_t		dma;
dma              2560 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	ret = hns_roce_cmd_mbox(hr_dev, mailbox->dma, 0, hr_qp->qpn, 0,
dma              3366 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, hr_qp->qpn, 0,
dma               883 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	dma_addr_t dma = ring->desc_dma_addr;
dma               886 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		roce_write(hr_dev, ROCEE_TX_CMQ_BASEADDR_L_REG, (u32)dma);
dma               888 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			   upper_32_bits(dma));
dma               894 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		roce_write(hr_dev, ROCEE_RX_CMQ_BASEADDR_L_REG, (u32)dma);
dma               896 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			   upper_32_bits(dma));
dma              2613 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       ((u32)hr_cq->db.dma) >> 1);
dma              2614 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	cq_context->db_record_addr = cpu_to_le32(hr_cq->db.dma >> 32);
dma              3092 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			ret = hns_roce_cmd_mbox(hr_dev, bt_ba, mailbox->dma,
dma              3103 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		ret = hns_roce_cmd_mbox(hr_dev, bt_ba, mailbox->dma, obj,
dma              3158 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, obj, 0, op,
dma              3180 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_cmd_mbox(hr_dev, mailbox->dma, 0, hr_qp->qpn, 0,
dma              3332 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       ((u32)hr_qp->rdb.dma) >> 1);
dma              3336 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	context->rq_db_record_addr = cpu_to_le32(hr_qp->rdb.dma >> 32);
dma              4506 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, hr_qp->qpn, 0,
dma              4824 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_cmd_mbox(hr_dev, mailbox->dma, 0, hr_cq->cqn, 1,
dma              5793 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_cmd_mbox(hr_dev, mailbox->dma, 0, eq->eqn, 0,
dma              6153 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		ret = hns_roce_cmd_mbox(hr_dev, mailbox->dma, 0, srq->srqn, 0,
dma              6181 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, srq->srqn, 0,
dma                21 drivers/infiniband/hw/hns/hns_roce_hw_v2_dfx.c 	ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, cqn, 0,
dma                55 drivers/infiniband/hw/hns/hns_roce_mr.c 	return hns_roce_cmd_mbox(hr_dev, mailbox->dma, 0, mpt_index, 0,
dma                64 drivers/infiniband/hw/hns/hns_roce_mr.c 	return hns_roce_cmd_mbox(hr_dev, 0, mailbox ? mailbox->dma : 0,
dma              1305 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hns_roce_cmd_mbox(hr_dev, 0, mailbox->dma, mtpt_idx, 0,
dma                66 drivers/infiniband/hw/hns/hns_roce_srq.c 	return hns_roce_cmd_mbox(dev, mailbox->dma, 0, srq_num, 0,
dma                75 drivers/infiniband/hw/hns/hns_roce_srq.c 	return hns_roce_cmd_mbox(dev, 0, mailbox ? mailbox->dma : 0, srq_num,
dma               248 drivers/infiniband/hw/mlx4/cq.c 	err = mlx4_cq_alloc(dev->dev, entries, &cq->buf.mtt, uar, cq->db.dma,
dma                77 drivers/infiniband/hw/mlx4/doorbell.c 	db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK);
dma               175 drivers/infiniband/hw/mlx4/mad.c 	err = mlx4_cmd_box(dev->dev, inmailbox->dma, outmailbox->dma, in_modifier,
dma               180 drivers/infiniband/hw/mlx4/main.c 	err = mlx4_cmd(dev, mailbox->dma,
dma               185 drivers/infiniband/hw/mlx4/main.c 		err += mlx4_cmd(dev, mailbox->dma,
dma               225 drivers/infiniband/hw/mlx4/main.c 	err = mlx4_cmd(dev, mailbox->dma,
dma               230 drivers/infiniband/hw/mlx4/main.c 		err += mlx4_cmd(dev, mailbox->dma,
dma               769 drivers/infiniband/hw/mlx4/main.c 	err = mlx4_cmd_box(mdev->dev, 0, mailbox->dma, port, 0,
dma              1024 drivers/infiniband/hw/mlx4/main.c 	mlx4_cmd(to_mdev(ibdev)->dev, mailbox->dma, 1, 0,
dma              1050 drivers/infiniband/hw/mlx4/main.c 	err = mlx4_cmd(dev->dev, mailbox->dma, port, MLX4_SET_PORT_IB_OPCODE,
dma              1604 drivers/infiniband/hw/mlx4/main.c 	ret = mlx4_cmd_imm(mdev->dev, mailbox->dma, reg_id, size >> 2, 0,
dma              2461 drivers/infiniband/hw/mlx4/qp.c 		context->db_rec_addr = cpu_to_be64(qp->db.dma);
dma               180 drivers/infiniband/hw/mlx4/srq.c 			     &srq->mtt, srq->db.dma, &srq->msrq);
dma               977 drivers/infiniband/hw/mlx5/cq.c 	MLX5_SET64(cqc, cqc, dbr_addr, cq->db.dma);
dma                77 drivers/infiniband/hw/mlx5/doorbell.c 	db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK);
dma               926 drivers/infiniband/hw/mlx5/mr.c 	dma_addr_t dma;
dma               977 drivers/infiniband/hw/mlx5/mr.c 	dma = dma_map_single(ddev, xlt, size, DMA_TO_DEVICE);
dma               978 drivers/infiniband/hw/mlx5/mr.c 	if (dma_mapping_error(ddev, dma)) {
dma               984 drivers/infiniband/hw/mlx5/mr.c 	sg.addr = dma;
dma              1006 drivers/infiniband/hw/mlx5/mr.c 		dma_sync_single_for_cpu(ddev, dma, size, DMA_TO_DEVICE);
dma              1010 drivers/infiniband/hw/mlx5/mr.c 		dma_sync_single_for_device(ddev, dma, size, DMA_TO_DEVICE);
dma              1035 drivers/infiniband/hw/mlx5/mr.c 	dma_unmap_single(ddev, dma, size, DMA_TO_DEVICE);
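The mlx5 MR path above maps a CPU-built translation table once with dma_map_single() and then brackets every CPU rewrite with dma_sync_single_for_cpu()/dma_sync_single_for_device() before the device is told to read it, finally tearing the mapping down with dma_unmap_single(). A sketch of that ownership hand-off; the fill/post callbacks stand in for driver-specific steps.

#include <linux/dma-mapping.h>

static int example_streaming_update(struct device *dev, void *xlt, size_t size,
				    void (*fill)(void *, size_t),
				    int (*post)(dma_addr_t, size_t))
{
	dma_addr_t dma;
	int err;

	dma = dma_map_single(dev, xlt, size, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma))
		return -ENOMEM;

	dma_sync_single_for_cpu(dev, dma, size, DMA_TO_DEVICE);   /* CPU owns it */
	fill(xlt, size);
	dma_sync_single_for_device(dev, dma, size, DMA_TO_DEVICE); /* device owns it */

	err = post(dma, size);

	dma_unmap_single(dev, dma, size, DMA_TO_DEVICE);
	return err;
}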
dma              2249 drivers/infiniband/hw/mlx5/qp.c 	MLX5_SET64(qpc, qpc, dbr_addr, qp->db.dma);
dma              3612 drivers/infiniband/hw/mlx5/qp.c 		context->db_rec_addr = cpu_to_be64(qp->db.dma);
dma              5972 drivers/infiniband/hw/mlx5/qp.c 	MLX5_SET64(wq, wq, dbr_addr, rwq->db.dma);
dma               300 drivers/infiniband/hw/mlx5/srq.c 	in.db_record = srq->db.dma;
dma               618 drivers/infiniband/hw/mthca/mthca_cmd.c 	mailbox->buf = dma_pool_alloc(dev->cmd.pool, gfp_mask, &mailbox->dma);
dma               632 drivers/infiniband/hw/mthca/mthca_cmd.c 	dma_pool_free(dev->cmd.pool, mailbox->buf, mailbox->dma);
dma               705 drivers/infiniband/hw/mthca/mthca_cmd.c 				err = mthca_cmd(dev, mailbox->dma, nent, 0, op,
dma               715 drivers/infiniband/hw/mthca/mthca_cmd.c 		err = mthca_cmd(dev, mailbox->dma, nent, 0, op,
dma               810 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_FW,
dma               903 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_ENABLE_LAM,
dma               958 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_DDR,
dma              1064 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_DEV_LIM,
dma              1287 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_box(dev, 0, mailbox->dma, 0, 0, CMD_QUERY_ADAPTER,
dma              1429 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd(dev, mailbox->dma, 0, 0,
dma              1481 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd(dev, mailbox->dma, port, 0, CMD_INIT_IB,
dma              1527 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd(dev, mailbox->dma, port, 0, CMD_SET_IB,
dma              1553 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd(dev, mailbox->dma, 1, 0, CMD_MAP_ICM,
dma              1605 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd(dev, mailbox->dma, mpt_index, 0, CMD_SW2HW_MPT,
dma              1612 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox ? mailbox->dma : 0, mpt_index,
dma              1620 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd(dev, mailbox->dma, num_mtt, 0, CMD_WRITE_MTT,
dma              1642 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd(dev, mailbox->dma, eq_num, 0, CMD_SW2HW_EQ,
dma              1649 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox->dma, eq_num, 0,
dma              1657 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd(dev, mailbox->dma, cq_num, 0, CMD_SW2HW_CQ,
dma              1664 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox->dma, cq_num, 0,
dma              1692 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd(dev, mailbox->dma, cq_num, 1, CMD_RESIZE_CQ,
dma              1702 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd(dev, mailbox->dma, srq_num, 0, CMD_SW2HW_SRQ,
dma              1709 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox->dma, srq_num, 0,
dma              1717 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox->dma, num, 0,
dma              1788 drivers/infiniband/hw/mthca/mthca_cmd.c 		err = mthca_cmd_box(dev, 0, mailbox ? mailbox->dma : 0,
dma              1823 drivers/infiniband/hw/mthca/mthca_cmd.c 		err = mthca_cmd(dev, mailbox->dma, optmask | (!!is_ee << 24) | num,
dma              1833 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox->dma, (!!is_ee << 24) | num, 0,
dma              1929 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_box(dev, inmailbox->dma, outmailbox->dma,
dma              1944 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd_box(dev, 0, mailbox->dma, index, 0,
dma              1951 drivers/infiniband/hw/mthca/mthca_cmd.c 	return mthca_cmd(dev, mailbox->dma, index, 0, CMD_WRITE_MGM,
dma              1961 drivers/infiniband/hw/mthca/mthca_cmd.c 	err = mthca_cmd_imm(dev, mailbox->dma, &imm, 0, 0, CMD_MGID_HASH,
dma               116 drivers/infiniband/hw/mthca/mthca_cmd.h 	dma_addr_t dma;
dma               133 drivers/infiniband/hw/ocrdma/ocrdma.h 	dma_addr_t dma;
dma               373 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	dma_free_coherent(&dev->nic_info.pdev->dev, q->size, q->va, q->dma);
dma               383 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	q->va = dma_alloc_coherent(&dev->nic_info.pdev->dev, q->size, &q->dma,
dma               448 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	ocrdma_build_q_pages(&cmd->pa[0], cmd->num_pages, eq->q.dma,
dma               545 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 			     cq->dma, PAGE_SIZE_4K);
dma               594 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	ocrdma_build_q_pages(pa, num_pages, mq->dma, PAGE_SIZE_4K);
dma              1331 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	struct ocrdma_dma_mem dma;
dma              1340 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	dma.size = sizeof(struct ocrdma_get_ctrl_attribs_rsp);
dma              1341 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	dma.va	 = dma_alloc_coherent(&dev->nic_info.pdev->dev,
dma              1342 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 					dma.size, &dma.pa, GFP_KERNEL);
dma              1343 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	if (!dma.va)
dma              1346 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	mqe->hdr.pyld_len = dma.size;
dma              1350 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	mqe->u.nonemb_req.sge[0].pa_lo = (u32) (dma.pa & 0xffffffff);
dma              1351 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	mqe->u.nonemb_req.sge[0].pa_hi = (u32) upper_32_bits(dma.pa);
dma              1352 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	mqe->u.nonemb_req.sge[0].len = dma.size;
dma              1354 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	ocrdma_init_mch((struct ocrdma_mbx_hdr *)dma.va,
dma              1357 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 			dma.size);
dma              1359 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	status = ocrdma_nonemb_mbx_cmd(dev, mqe, dma.va);
dma              1361 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 		ctrl_attr_rsp = (struct ocrdma_get_ctrl_attribs_rsp *)dma.va;
dma              1371 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	dma_free_coherent(&dev->nic_info.pdev->dev, dma.size, dma.va, dma.pa);
dma               396 drivers/infiniband/hw/qib/qib_tx.c 			unsigned long dma;
dma               415 drivers/infiniband/hw/qib/qib_tx.c 			dma = (unsigned long)
dma               418 drivers/infiniband/hw/qib/qib_tx.c 				      start) % BITS_PER_LONG, &dma))
dma               374 drivers/infiniband/sw/rxe/rxe_comp.c 			&wqe->dma, payload_addr(pkt),
dma               379 drivers/infiniband/sw/rxe/rxe_comp.c 	if (wqe->dma.resid == 0 && (pkt->mask & RXE_END_MASK))
dma               394 drivers/infiniband/sw/rxe/rxe_comp.c 			&wqe->dma, &atomic_orig,
dma               416 drivers/infiniband/sw/rxe/rxe_comp.c 		wc->byte_len		= wqe->dma.length;
dma               427 drivers/infiniband/sw/rxe/rxe_comp.c 		uwc->byte_len		= wqe->dma.length;
dma               120 drivers/infiniband/sw/rxe/rxe_loc.h 	      struct rxe_dma_info *dma, void *addr, int length,
dma               140 drivers/infiniband/sw/rxe/rxe_loc.h int advance_dma_data(struct rxe_dma_info *dma, unsigned int length);
dma               438 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_dma_info	*dma,
dma               445 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_sge		*sge	= &dma->sge[dma->cur_sge];
dma               446 drivers/infiniband/sw/rxe/rxe_mr.c 	int			offset	= dma->sge_offset;
dma               447 drivers/infiniband/sw/rxe/rxe_mr.c 	int			resid	= dma->resid;
dma               477 drivers/infiniband/sw/rxe/rxe_mr.c 			dma->cur_sge++;
dma               480 drivers/infiniband/sw/rxe/rxe_mr.c 			if (dma->cur_sge >= dma->num_sge) {
dma               514 drivers/infiniband/sw/rxe/rxe_mr.c 	dma->sge_offset = offset;
dma               515 drivers/infiniband/sw/rxe/rxe_mr.c 	dma->resid	= resid;
dma               529 drivers/infiniband/sw/rxe/rxe_mr.c int advance_dma_data(struct rxe_dma_info *dma, unsigned int length)
dma               531 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_sge		*sge	= &dma->sge[dma->cur_sge];
dma               532 drivers/infiniband/sw/rxe/rxe_mr.c 	int			offset	= dma->sge_offset;
dma               533 drivers/infiniband/sw/rxe/rxe_mr.c 	int			resid	= dma->resid;
dma               540 drivers/infiniband/sw/rxe/rxe_mr.c 			dma->cur_sge++;
dma               542 drivers/infiniband/sw/rxe/rxe_mr.c 			if (dma->cur_sge >= dma->num_sge)
dma               556 drivers/infiniband/sw/rxe/rxe_mr.c 	dma->sge_offset = offset;
dma               557 drivers/infiniband/sw/rxe/rxe_mr.c 	dma->resid	= resid;
dma                51 drivers/infiniband/sw/rxe/rxe_req.c 		int to_send = (wqe->dma.resid > qp->mtu) ?
dma                52 drivers/infiniband/sw/rxe/rxe_req.c 				qp->mtu : wqe->dma.resid;
dma                58 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.resid -= to_send;
dma                59 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.sge_offset += to_send;
dma                61 drivers/infiniband/sw/rxe/rxe_req.c 			advance_dma_data(&wqe->dma, to_send);
dma                99 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.resid = wqe->dma.length;
dma               100 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.cur_sge = 0;
dma               101 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.sge_offset = 0;
dma               114 drivers/infiniband/sw/rxe/rxe_req.c 				npsn = (wqe->dma.length - wqe->dma.resid) /
dma               320 drivers/infiniband/sw/rxe/rxe_req.c 	int fits = (wqe->dma.resid <= qp->mtu);
dma               441 drivers/infiniband/sw/rxe/rxe_req.c 		reth_set_len(pkt, wqe->dma.resid);
dma               488 drivers/infiniband/sw/rxe/rxe_req.c 			u8 *tmp = &wqe->dma.inline_data[wqe->dma.sge_offset];
dma               493 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.resid -= paylen;
dma               494 drivers/infiniband/sw/rxe/rxe_req.c 			wqe->dma.sge_offset += paylen;
dma               496 drivers/infiniband/sw/rxe/rxe_req.c 			err = copy_data(qp->pd, 0, &wqe->dma,
dma               535 drivers/infiniband/sw/rxe/rxe_req.c 	int num_pkt = (wqe->dma.resid + payload + qp->mtu - 1) / qp->mtu;
dma               692 drivers/infiniband/sw/rxe/rxe_req.c 	payload = (mask & RXE_WRITE_OR_SEND) ? wqe->dma.resid : 0;
dma               512 drivers/infiniband/sw/rxe/rxe_resp.c 	err = copy_data(qp->pd, IB_ACCESS_LOCAL_WRITE, &qp->resp.wqe->dma,
dma               869 drivers/infiniband/sw/rxe/rxe_resp.c 					qp->resp.length : wqe->dma.length - wqe->dma.resid;
dma              1189 drivers/infiniband/sw/rxe/rxe_resp.c 			qp->resp.wqe->dma.resid = qp->resp.wqe->dma.length;
dma              1190 drivers/infiniband/sw/rxe/rxe_resp.c 			qp->resp.wqe->dma.cur_sge = 0;
dma              1191 drivers/infiniband/sw/rxe/rxe_resp.c 			qp->resp.wqe->dma.sge_offset = 0;
dma               271 drivers/infiniband/sw/rxe/rxe_verbs.c 	memcpy(recv_wqe->dma.sge, ibwr->sg_list,
dma               274 drivers/infiniband/sw/rxe/rxe_verbs.c 	recv_wqe->dma.length		= length;
dma               275 drivers/infiniband/sw/rxe/rxe_verbs.c 	recv_wqe->dma.resid		= length;
dma               276 drivers/infiniband/sw/rxe/rxe_verbs.c 	recv_wqe->dma.num_sge		= num_sge;
dma               277 drivers/infiniband/sw/rxe/rxe_verbs.c 	recv_wqe->dma.cur_sge		= 0;
dma               278 drivers/infiniband/sw/rxe/rxe_verbs.c 	recv_wqe->dma.sge_offset	= 0;
dma               603 drivers/infiniband/sw/rxe/rxe_verbs.c 		p = wqe->dma.inline_data;
dma               617 drivers/infiniband/sw/rxe/rxe_verbs.c 		memcpy(wqe->dma.sge, ibwr->sg_list,
dma               623 drivers/infiniband/sw/rxe/rxe_verbs.c 	wqe->dma.length		= length;
dma               624 drivers/infiniband/sw/rxe/rxe_verbs.c 	wqe->dma.resid		= length;
dma               625 drivers/infiniband/sw/rxe/rxe_verbs.c 	wqe->dma.num_sge	= num_sge;
dma               626 drivers/infiniband/sw/rxe/rxe_verbs.c 	wqe->dma.cur_sge	= 0;
dma               627 drivers/infiniband/sw/rxe/rxe_verbs.c 	wqe->dma.sge_offset	= 0;
dma               245 drivers/infiniband/ulp/srp/ib_srp.c 	iu->dma = ib_dma_map_single(host->srp_dev->dev, iu->buf, size,
dma               247 drivers/infiniband/ulp/srp/ib_srp.c 	if (ib_dma_mapping_error(host->srp_dev->dev, iu->dma))
dma               268 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_unmap_single(host->srp_dev->dev, iu->dma, iu->size,
dma              2079 drivers/infiniband/ulp/srp/ib_srp.c 	iu->sge[0].addr   = iu->dma;
dma              2101 drivers/infiniband/ulp/srp/ib_srp.c 	list.addr   = iu->dma;
dma              2202 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_cpu(dev, iu->dma, len, DMA_TO_DEVICE);
dma              2204 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_device(dev, iu->dma, len, DMA_TO_DEVICE);
dma              2262 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_cpu(dev, iu->dma, ch->max_ti_iu_len,
dma              2299 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_device(dev, iu->dma, ch->max_ti_iu_len,
dma              2374 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_cpu(dev, iu->dma, ch->max_it_iu_len,
dma              2411 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_device(dev, iu->dma, ch->max_it_iu_len,
dma              2939 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_cpu(dev, iu->dma, sizeof *tsk_mgmt,
dma              2956 drivers/infiniband/ulp/srp/ib_srp.c 	ib_dma_sync_single_for_device(dev, iu->dma, sizeof *tsk_mgmt,
dma               272 drivers/infiniband/ulp/srp/ib_srp.h 	u64			dma;
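ib_srp stores the mapped address of each information unit in iu->dma and goes through the ib_dma_* wrappers, which take the struct ib_device instead of a struct device but obey the same map/sync/unmap rules. A hedged sketch of mapping and filling one outbound IU (0 is used as the failure sentinel here purely for brevity):

#include <rdma/ib_verbs.h>

static u64 example_map_iu(struct ib_device *ibdev, void *buf, size_t len)
{
	u64 dma = ib_dma_map_single(ibdev, buf, len, DMA_TO_DEVICE);

	if (ib_dma_mapping_error(ibdev, dma))
		return 0;

	ib_dma_sync_single_for_cpu(ibdev, dma, len, DMA_TO_DEVICE);
	/* ... CPU fills the IU ... */
	ib_dma_sync_single_for_device(ibdev, dma, len, DMA_TO_DEVICE);

	return dma;	/* becomes sge.addr when the send WR is posted */
}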
dma               669 drivers/infiniband/ulp/srpt/ib_srpt.c 	ioctx->dma = ib_dma_map_single(sdev->device, ioctx->buf,
dma               671 drivers/infiniband/ulp/srpt/ib_srpt.c 	if (ib_dma_mapping_error(sdev->device, ioctx->dma))
dma               698 drivers/infiniband/ulp/srpt/ib_srpt.c 	ib_dma_unmap_single(sdev->device, ioctx->dma,
dma               828 drivers/infiniband/ulp/srpt/ib_srpt.c 	list.addr = ioctx->ioctx.dma + ioctx->ioctx.offset;
dma              1628 drivers/infiniband/ulp/srpt/ib_srpt.c 				   recv_ioctx->ioctx.dma,
dma              2855 drivers/infiniband/ulp/srpt/ib_srpt.c 	ib_dma_sync_single_for_device(sdev->device, ioctx->ioctx.dma, resp_len,
dma              2858 drivers/infiniband/ulp/srpt/ib_srpt.c 	sge.addr = ioctx->ioctx.dma;
dma               177 drivers/infiniband/ulp/srpt/ib_srpt.h 	dma_addr_t		dma;
dma               238 drivers/iommu/io-pgtable-arm-v7s.c 	dma_addr_t dma;
dma               254 drivers/iommu/io-pgtable-arm-v7s.c 		dma = dma_map_single(dev, table, size, DMA_TO_DEVICE);
dma               255 drivers/iommu/io-pgtable-arm-v7s.c 		if (dma_mapping_error(dev, dma))
dma               262 drivers/iommu/io-pgtable-arm-v7s.c 		if (dma != phys)
dma               271 drivers/iommu/io-pgtable-arm-v7s.c 	dma_unmap_single(dev, dma, size, DMA_TO_DEVICE);
dma               236 drivers/iommu/io-pgtable-arm.c 	dma_addr_t dma;
dma               247 drivers/iommu/io-pgtable-arm.c 		dma = dma_map_single(dev, pages, size, DMA_TO_DEVICE);
dma               248 drivers/iommu/io-pgtable-arm.c 		if (dma_mapping_error(dev, dma))
dma               255 drivers/iommu/io-pgtable-arm.c 		if (dma != virt_to_phys(pages))
dma               263 drivers/iommu/io-pgtable-arm.c 	dma_unmap_single(dev, dma, size, DMA_TO_DEVICE);
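Both io-pgtable flavours above map freshly allocated page-table memory with dma_map_single() and then require the returned bus address to equal the table's physical address, because the table walker is handed raw physical pointers; if the addresses differ (bounce buffering, an IOMMU in the way) the table is unmapped and discarded. A condensed sketch under the assumption that kzalloc() provides the table memory:

#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/slab.h>

static void *example_alloc_table(struct device *dev, size_t size, gfp_t gfp)
{
	void *table = kzalloc(size, gfp);	/* walked directly by the HW */
	dma_addr_t dma;

	if (!table)
		return NULL;

	dma = dma_map_single(dev, table, size, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma))
		goto out_free;

	if (dma != virt_to_phys(table))		/* bouncing would break the walker */
		goto out_unmap;

	return table;

out_unmap:
	dma_unmap_single(dev, dma, size, DMA_TO_DEVICE);
out_free:
	kfree(table);
	return NULL;
}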
dma               472 drivers/iommu/omap-iommu.c static void flush_iopte_range(struct device *dev, dma_addr_t dma,
dma               477 drivers/iommu/omap-iommu.c 	dma_sync_single_range_for_device(dev, dma, offset, size, DMA_TO_DEVICE);
dma               120 drivers/iommu/rockchip-iommu.c static inline void rk_table_flush(struct rk_iommu_domain *dom, dma_addr_t dma,
dma               125 drivers/iommu/rockchip-iommu.c 	dma_sync_single_for_device(dma_dev, dma, size, DMA_TO_DEVICE);
dma                94 drivers/iommu/tegra-smmu.c #define  SMMU_PTB_DATA_VALUE(dma, attr) ((dma) >> 12 | (attr))
dma                96 drivers/iommu/tegra-smmu.c #define SMMU_MK_PDE(dma, attr) ((dma) >> SMMU_PTE_SHIFT | (attr))
dma               172 drivers/iommu/tegra-smmu.c static inline void smmu_flush_ptc(struct tegra_smmu *smmu, dma_addr_t dma,
dma               181 drivers/iommu/tegra-smmu.c 		value = (dma >> 32) & SMMU_PTC_FLUSH_HI_MASK;
dma               188 drivers/iommu/tegra-smmu.c 	value = (dma + offset) | SMMU_PTC_FLUSH_TYPE_ADR;
dma               574 drivers/iommu/tegra-smmu.c 		dma_addr_t dma;
dma               580 drivers/iommu/tegra-smmu.c 		dma = dma_map_page(smmu->dev, page, 0, SMMU_SIZE_PT,
dma               582 drivers/iommu/tegra-smmu.c 		if (dma_mapping_error(smmu->dev, dma)) {
dma               587 drivers/iommu/tegra-smmu.c 		if (!smmu_dma_addr_valid(smmu, dma)) {
dma               588 drivers/iommu/tegra-smmu.c 			dma_unmap_page(smmu->dev, dma, SMMU_SIZE_PT,
dma               596 drivers/iommu/tegra-smmu.c 		tegra_smmu_set_pde(as, iova, SMMU_MK_PDE(dma, SMMU_PDE_ATTR |
dma               599 drivers/iommu/tegra-smmu.c 		*dmap = dma;
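tegra-smmu maps each second-level table page with dma_map_page() and additionally checks that the resulting address is reachable by the SMMU before shifting it into the PDE (cf. SMMU_MK_PDE above), unmapping and dropping the page otherwise. A sketch with the validity test passed in as a callback, since the real smmu_dma_addr_valid() check is hardware specific:

#include <linux/dma-mapping.h>
#include <linux/mm_types.h>

/* Returns the bus address to install, or 0 on failure (sentinel for brevity). */
static dma_addr_t example_map_pt(struct device *dev, struct page *page,
				 size_t pt_size, bool (*addr_ok)(dma_addr_t))
{
	dma_addr_t dma = dma_map_page(dev, page, 0, pt_size, DMA_TO_DEVICE);

	if (dma_mapping_error(dev, dma))
		return 0;

	if (!addr_ok(dma)) {	/* e.g. must fit the SMMU's address width */
		dma_unmap_page(dev, dma, pt_size, DMA_TO_DEVICE);
		return 0;
	}
	return dma;
}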
dma                72 drivers/isdn/hardware/mISDN/netjet.c 	dma_addr_t		dma;
dma               301 drivers/isdn/hardware/mISDN/netjet.c 					   &card->dma);
dma               306 drivers/isdn/hardware/mISDN/netjet.c 	if ((u64)card->dma > 0xffffffff) {
dma               325 drivers/isdn/hardware/mISDN/netjet.c 	card->send.dmastart = (u32)card->dma;
dma               343 drivers/isdn/hardware/mISDN/netjet.c 	card->recv.dmastart = (u32)card->dma  + (NJ_DMA_SIZE / 2);
dma               969 drivers/isdn/hardware/mISDN/netjet.c 				    card->dma_p, card->dma);
dma               123 drivers/media/common/b2c2/flexcop-common.h 		struct flexcop_dma *dma, u32 size);
dma               124 drivers/media/common/b2c2/flexcop-common.h void flexcop_dma_free(struct flexcop_dma *dma);
dma               130 drivers/media/common/b2c2/flexcop-common.h int flexcop_dma_config(struct flexcop_device *fc, struct flexcop_dma *dma,
dma                 8 drivers/media/common/btcx-risc.h 	dma_addr_t     dma;
dma               215 drivers/media/common/saa7146/saa7146_core.c 	pci_free_consistent(pci, pt->size, pt->cpu, pt->dma);
dma               230 drivers/media/common/saa7146/saa7146_core.c 	pt->dma  = dma_addr;
dma               501 drivers/media/common/saa7146/saa7146_core.c 		dma_addr_t dma;
dma               522 drivers/media/common/saa7146/saa7146_core.c 		pci_free_consistent(pdev, SAA7146_RPS_MEM, p->addr, p->dma);
dma                55 drivers/media/common/saa7146/saa7146_fops.c 	struct videobuf_dmabuf *dma=videobuf_to_dma(&buf->vb);
dma                61 drivers/media/common/saa7146/saa7146_fops.c 	videobuf_dma_unmap(q->dev, dma);
dma                62 drivers/media/common/saa7146/saa7146_fops.c 	videobuf_dma_free(dma);
dma               725 drivers/media/common/saa7146/saa7146_hlp.c 	vdma1.base_page		= buf->pt[0].dma | ME1 | sfmt->swap;
dma               838 drivers/media/common/saa7146/saa7146_hlp.c 	BUG_ON(0 == buf->pt[0].dma);
dma               839 drivers/media/common/saa7146/saa7146_hlp.c 	BUG_ON(0 == buf->pt[1].dma);
dma               840 drivers/media/common/saa7146/saa7146_hlp.c 	BUG_ON(0 == buf->pt[2].dma);
dma               853 drivers/media/common/saa7146/saa7146_hlp.c 	vdma1.base_page		= buf->pt[0].dma | ME1;
dma               866 drivers/media/common/saa7146/saa7146_hlp.c 	vdma2.base_page		= buf->pt[1].dma | ME1;
dma               869 drivers/media/common/saa7146/saa7146_hlp.c 	vdma3.base_page		= buf->pt[2].dma | ME1;
dma               157 drivers/media/common/saa7146/saa7146_vbi.c 	vdma3.base_page	= buf->pt[2].dma | ME1;
dma               244 drivers/media/common/saa7146/saa7146_vbi.c 		struct videobuf_dmabuf *dma=videobuf_to_dma(&buf->vb);
dma               258 drivers/media/common/saa7146/saa7146_vbi.c 						 dma->sglist, dma->sglen);
dma               199 drivers/media/common/saa7146/saa7146_video.c 	struct videobuf_dmabuf *dma=videobuf_to_dma(&buf->vb);
dma               200 drivers/media/common/saa7146/saa7146_video.c 	struct scatterlist *list = dma->sglist;
dma               201 drivers/media/common/saa7146/saa7146_video.c 	int length = dma->sglen;
dma               263 drivers/media/common/saa7146/saa7146_video.c 		pt1->offset = dma->sglist->offset;
dma                10 drivers/media/pci/b2c2/flexcop-dma.c 		struct flexcop_dma *dma, u32 size)
dma                22 drivers/media/pci/b2c2/flexcop-dma.c 		dma->pdev = pdev;
dma                23 drivers/media/pci/b2c2/flexcop-dma.c 		dma->cpu_addr0 = tcpu;
dma                24 drivers/media/pci/b2c2/flexcop-dma.c 		dma->dma_addr0 = tdma;
dma                25 drivers/media/pci/b2c2/flexcop-dma.c 		dma->cpu_addr1 = tcpu + size/2;
dma                26 drivers/media/pci/b2c2/flexcop-dma.c 		dma->dma_addr1 = tdma + size/2;
dma                27 drivers/media/pci/b2c2/flexcop-dma.c 		dma->size = size/2;
dma                34 drivers/media/pci/b2c2/flexcop-dma.c void flexcop_dma_free(struct flexcop_dma *dma)
dma                36 drivers/media/pci/b2c2/flexcop-dma.c 	pci_free_consistent(dma->pdev, dma->size*2,
dma                37 drivers/media/pci/b2c2/flexcop-dma.c 			dma->cpu_addr0, dma->dma_addr0);
dma                38 drivers/media/pci/b2c2/flexcop-dma.c 	memset(dma, 0, sizeof(struct flexcop_dma));
dma                43 drivers/media/pci/b2c2/flexcop-dma.c 		struct flexcop_dma *dma,
dma                49 drivers/media/pci/b2c2/flexcop-dma.c 	v0x0.dma_0x0.dma_address0 = dma->dma_addr0 >> 2;
dma                50 drivers/media/pci/b2c2/flexcop-dma.c 	v0xc.dma_0xc.dma_address1 = dma->dma_addr1 >> 2;
dma                51 drivers/media/pci/b2c2/flexcop-dma.c 	v0x4.dma_0x4_write.dma_addr_size = dma->size / 4;
dma                58 drivers/media/pci/b2c2/flexcop-pci.c 	struct flexcop_dma dma[2];
dma               173 drivers/media/pci/b2c2/flexcop-pci.c 					fc_pci->dma[0].cpu_addr0,
dma               174 drivers/media/pci/b2c2/flexcop-pci.c 					fc_pci->dma[0].size / 188);
dma               177 drivers/media/pci/b2c2/flexcop-pci.c 					fc_pci->dma[0].cpu_addr1,
dma               178 drivers/media/pci/b2c2/flexcop-pci.c 					fc_pci->dma[0].size / 188);
dma               187 drivers/media/pci/b2c2/flexcop-pci.c 		u32 cur_pos = cur_addr - fc_pci->dma[0].dma_addr0;
dma               200 drivers/media/pci/b2c2/flexcop-pci.c 				(fc_pci->dma[0].size*2 - 1) -
dma               203 drivers/media/pci/b2c2/flexcop-pci.c 				fc_pci->dma[0].cpu_addr0 +
dma               205 drivers/media/pci/b2c2/flexcop-pci.c 				(fc_pci->dma[0].size*2) -
dma               214 drivers/media/pci/b2c2/flexcop-pci.c 				fc_pci->dma[0].cpu_addr0 +
dma               236 drivers/media/pci/b2c2/flexcop-pci.c 		flexcop_dma_config(fc, &fc_pci->dma[0], FC_DMA_1);
dma               237 drivers/media/pci/b2c2/flexcop-pci.c 		flexcop_dma_config(fc, &fc_pci->dma[1], FC_DMA_2);
dma               261 drivers/media/pci/b2c2/flexcop-pci.c 	ret = flexcop_dma_allocate(fc_pci->pdev, &fc_pci->dma[0],
dma               266 drivers/media/pci/b2c2/flexcop-pci.c 	ret = flexcop_dma_allocate(fc_pci->pdev, &fc_pci->dma[1],
dma               269 drivers/media/pci/b2c2/flexcop-pci.c 		flexcop_dma_free(&fc_pci->dma[0]);
dma               284 drivers/media/pci/b2c2/flexcop-pci.c 		flexcop_dma_free(&fc_pci->dma[0]);
dma               285 drivers/media/pci/b2c2/flexcop-pci.c 		flexcop_dma_free(&fc_pci->dma[1]);
dma                49 drivers/media/pci/bt8xx/btcx-risc.c 		memcnt, (unsigned long)risc->dma);
dma                51 drivers/media/pci/bt8xx/btcx-risc.c 	pci_free_consistent(pci, risc->size, risc->cpu, risc->dma);
dma                60 drivers/media/pci/bt8xx/btcx-risc.c 	dma_addr_t dma = 0;
dma                65 drivers/media/pci/bt8xx/btcx-risc.c 		cpu = pci_alloc_consistent(pci, size, &dma);
dma                69 drivers/media/pci/bt8xx/btcx-risc.c 		risc->dma  = dma;
dma                74 drivers/media/pci/bt8xx/btcx-risc.c 			memcnt, (unsigned long)dma, cpu, size);
dma                 6 drivers/media/pci/bt8xx/btcx-risc.h 	dma_addr_t     dma;
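The bt8xx/cx88/cx23885 RISC helpers in the entries that follow still allocate their DMA-visible RISC programs through the legacy pci_alloc_consistent()/pci_zalloc_consistent()/pci_free_consistent() wrappers, thin shims over dma_alloc_coherent() on &pdev->dev. A minimal sketch of the allocate/free pairing around a btcx_riscmem-style structure (the structure here is illustrative):

#include <linux/pci.h>

struct example_risc {
	__le32		*cpu;	/* CPU view of the RISC program */
	dma_addr_t	dma;	/* bus address written to the chip */
	size_t		size;
};

static int example_risc_alloc(struct pci_dev *pci, struct example_risc *risc,
			      size_t size)
{
	risc->size = size;
	risc->cpu = pci_zalloc_consistent(pci, size, &risc->dma);
	return risc->cpu ? 0 : -ENOMEM;
}

static void example_risc_free(struct pci_dev *pci, struct example_risc *risc)
{
	if (risc->cpu)
		pci_free_consistent(pci, risc->size, risc->cpu, risc->dma);
	risc->cpu = NULL;
}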
dma              3371 drivers/media/pci/bt8xx/bttv-driver.c 		btv->c.v4l2_dev.name, risc->cpu, (unsigned long)risc->dma);
dma              3375 drivers/media/pci/bt8xx/bttv-driver.c 			(unsigned long)(risc->dma + (i<<2)));
dma              3380 drivers/media/pci/bt8xx/bttv-driver.c 				(unsigned long)(risc->dma + ((i+j)<<2)),
dma              3389 drivers/media/pci/bt8xx/bttv-driver.c 	pr_info("  main: %08llx\n", (unsigned long long)btv->main.dma);
dma              3391 drivers/media/pci/bt8xx/bttv-driver.c 		btv->cvbi ? (unsigned long long)btv->cvbi->top.dma : 0,
dma              3392 drivers/media/pci/bt8xx/bttv-driver.c 		btv->cvbi ? (unsigned long long)btv->cvbi->bottom.dma : 0);
dma              3395 drivers/media/pci/bt8xx/bttv-driver.c 		? (unsigned long long)btv->curr.top->top.dma : 0,
dma              3397 drivers/media/pci/bt8xx/bttv-driver.c 		? (unsigned long long)btv->curr.bottom->bottom.dma : 0);
dma              3399 drivers/media/pci/bt8xx/bttv-driver.c 		btv->screen ? (unsigned long long)btv->screen->top.dma : 0,
dma              3400 drivers/media/pci/bt8xx/bttv-driver.c 		btv->screen ? (unsigned long long)btv->screen->bottom.dma : 0);
dma              3446 drivers/media/pci/bt8xx/bttv-driver.c 		(unsigned long)btv->main.dma,
dma              3670 drivers/media/pci/bt8xx/bttv-driver.c 	if (rc < risc->dma)
dma              3672 drivers/media/pci/bt8xx/bttv-driver.c 	if (rc > risc->dma + risc->size)
dma               470 drivers/media/pci/bt8xx/bttv-risc.c 		 btv->cvbi         ? (unsigned long long)btv->cvbi->top.dma            : 0,
dma               471 drivers/media/pci/bt8xx/bttv-risc.c 		 btv->curr.top     ? (unsigned long long)btv->curr.top->top.dma        : 0,
dma               472 drivers/media/pci/bt8xx/bttv-risc.c 		 btv->cvbi         ? (unsigned long long)btv->cvbi->bottom.dma         : 0,
dma               473 drivers/media/pci/bt8xx/bttv-risc.c 		 btv->curr.bottom  ? (unsigned long long)btv->curr.bottom->bottom.dma  : 0);
dma               492 drivers/media/pci/bt8xx/bttv-risc.c 		btwrite(btv->main.dma, BT848_RISC_STRT_ADD);
dma               512 drivers/media/pci/bt8xx/bttv-risc.c 		btv->c.nr, (unsigned long long)btv->main.dma);
dma               518 drivers/media/pci/bt8xx/bttv-risc.c 	btv->main.cpu[3] = cpu_to_le32(btv->main.dma + (4<<2));
dma               522 drivers/media/pci/bt8xx/bttv-risc.c 	btv->main.cpu[5] = cpu_to_le32(btv->main.dma + (6<<2));
dma               524 drivers/media/pci/bt8xx/bttv-risc.c 	btv->main.cpu[7] = cpu_to_le32(btv->main.dma + (8<<2));
dma               532 drivers/media/pci/bt8xx/bttv-risc.c 	btv->main.cpu[11] = cpu_to_le32(btv->main.dma + (12<<2));
dma               534 drivers/media/pci/bt8xx/bttv-risc.c 	btv->main.cpu[13] = cpu_to_le32(btv->main.dma + (14<<2));
dma               538 drivers/media/pci/bt8xx/bttv-risc.c 	btv->main.cpu[15] = cpu_to_le32(btv->main.dma + (0<<2));
dma               548 drivers/media/pci/bt8xx/bttv-risc.c 	unsigned long next = btv->main.dma + ((slot+2) << 2);
dma               556 drivers/media/pci/bt8xx/bttv-risc.c 			 (unsigned long long)risc->dma, irqflags);
dma               565 drivers/media/pci/bt8xx/bttv-risc.c 		btv->main.cpu[slot+1] = cpu_to_le32(risc->dma);
dma               573 drivers/media/pci/bt8xx/bttv-risc.c 	struct videobuf_dmabuf *dma=videobuf_to_dma(&buf->vb);
dma               577 drivers/media/pci/bt8xx/bttv-risc.c 	videobuf_dma_unmap(q->dev, dma);
dma               578 drivers/media/pci/bt8xx/bttv-risc.c 	videobuf_dma_free(dma);
dma               700 drivers/media/pci/bt8xx/bttv-risc.c 	struct videobuf_dmabuf *dma=videobuf_to_dma(&buf->vb);
dma               717 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_packed(btv,&buf->top,dma->sglist,
dma               723 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_packed(btv,&buf->bottom,dma->sglist,
dma               727 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_packed(btv,&buf->top,dma->sglist,
dma               729 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_packed(btv,&buf->bottom,dma->sglist,
dma               733 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_packed(btv,&buf->top,dma->sglist,
dma               735 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_packed(btv,&buf->bottom,dma->sglist,
dma               768 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_planar(btv, &buf->top, dma->sglist,
dma               777 drivers/media/pci/bt8xx/bttv-risc.c 			bttv_risc_planar(btv, &buf->bottom, dma->sglist,
dma               790 drivers/media/pci/bt8xx/bttv-risc.c 					 dma->sglist,
dma               797 drivers/media/pci/bt8xx/bttv-risc.c 					 dma->sglist,
dma               813 drivers/media/pci/bt8xx/bttv-risc.c 					 dma->sglist,
dma               821 drivers/media/pci/bt8xx/bttv-risc.c 					 dma->sglist,
dma               840 drivers/media/pci/bt8xx/bttv-risc.c 		bttv_risc_packed(btv, &buf->top,  dma->sglist,
dma               843 drivers/media/pci/bt8xx/bttv-risc.c 		bttv_risc_packed(btv, &buf->bottom, dma->sglist,
dma               147 drivers/media/pci/bt8xx/bttv-vbi.c 		struct videobuf_dmabuf *dma=videobuf_to_dma(&buf->vb);
dma               154 drivers/media/pci/bt8xx/bttv-vbi.c 					      dma->sglist,
dma               166 drivers/media/pci/bt8xx/bttv-vbi.c 					      dma->sglist,
dma               373 drivers/media/pci/cx18/cx18-driver.h 	int dma;		/* can be PCI_DMA_TODEVICE,
dma               322 drivers/media/pci/cx18/cx18-queue.c 	int dma = s->dma;
dma               329 drivers/media/pci/cx18/cx18-queue.c 					       buf_size, dma);
dma               389 drivers/media/pci/cx18/cx18-queue.c 				buf->buf, s->buf_size, s->dma);
dma               423 drivers/media/pci/cx18/cx18-queue.c 				s->buf_size, s->dma);
dma                19 drivers/media/pci/cx18/cx18-queue.h 				s->buf_size, s->dma);
dma                26 drivers/media/pci/cx18/cx18-queue.h 				s->buf_size, s->dma);
dma                46 drivers/media/pci/cx18/cx18-streams.c 	int dma;
dma               254 drivers/media/pci/cx18/cx18-streams.c 	s->dma = cx18_stream_info[type].dma;
dma               327 drivers/media/pci/cx18/cx18-streams.c 	if (cx18_stream_info[type].dma != PCI_DMA_NONE &&
dma               164 drivers/media/pci/cx23885/cx23885-alsa.c 		buf->risc.dma);
dma               268 drivers/media/pci/cx23885/cx23885-alsa.c 	pci_free_consistent(chip->pci, risc->size, risc->cpu, risc->dma);
dma               399 drivers/media/pci/cx23885/cx23885-alsa.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               595 drivers/media/pci/cx23885/cx23885-core.c 	       dev->name, risc->cpu, (unsigned long)risc->dma);
dma              1221 drivers/media/pci/cx23885/cx23885-core.c 	risc->cpu = pci_alloc_consistent(pci, risc->size, &risc->dma);
dma              1258 drivers/media/pci/cx23885/cx23885-core.c 	risc->cpu = pci_alloc_consistent(pci, risc->size, &risc->dma);
dma              1296 drivers/media/pci/cx23885/cx23885-core.c 	risc->cpu = pci_alloc_consistent(pci, risc->size, &risc->dma);
dma              1326 drivers/media/pci/cx23885/cx23885-core.c 	pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma);
dma              1412 drivers/media/pci/cx23885/cx23885-core.c 				   port->ts_packet_size, buf->risc.dma);
dma              1619 drivers/media/pci/cx23885/cx23885-core.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 12);
dma              1621 drivers/media/pci/cx23885/cx23885-core.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 12);
dma              1634 drivers/media/pci/cx23885/cx23885-core.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma              1657 drivers/media/pci/cx23885/cx23885-core.c 			(unsigned long)buf->risc.dma);
dma                94 drivers/media/pci/cx23885/cx23885-vbi.c 				VBI_LINE_LENGTH, buf->risc.dma);
dma               192 drivers/media/pci/cx23885/cx23885-vbi.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 12);
dma               194 drivers/media/pci/cx23885/cx23885-vbi.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 12);
dma               211 drivers/media/pci/cx23885/cx23885-vbi.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               312 drivers/media/pci/cx23885/cx23885-video.c 				buf->bpl, buf->risc.dma);
dma               417 drivers/media/pci/cx23885/cx23885-video.c 		(unsigned long)buf->risc.dma);
dma               462 drivers/media/pci/cx23885/cx23885-video.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 12);
dma               464 drivers/media/pci/cx23885/cx23885-video.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 12);
dma               477 drivers/media/pci/cx23885/cx23885-video.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               167 drivers/media/pci/cx23885/cx23885.h 	dma_addr_t     dma;
dma               231 drivers/media/pci/cx25821/cx25821-alsa.c 					 buf->risc.dma);
dma               405 drivers/media/pci/cx25821/cx25821-alsa.c 	pci_free_consistent(chip->pci, risc->size, risc->cpu, risc->dma);
dma               543 drivers/media/pci/cx25821/cx25821-alsa.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               977 drivers/media/pci/cx25821/cx25821-core.c 	dma_addr_t dma = 0;
dma               980 drivers/media/pci/cx25821/cx25821-core.c 		pci_free_consistent(pci, risc->size, risc->cpu, risc->dma);
dma               982 drivers/media/pci/cx25821/cx25821-core.c 		cpu = pci_zalloc_consistent(pci, size, &dma);
dma               986 drivers/media/pci/cx25821/cx25821-core.c 		risc->dma  = dma;
dma              1205 drivers/media/pci/cx25821/cx25821-core.c 			buf->risc.size, buf->risc.cpu, buf->risc.dma);
dma                66 drivers/media/pci/cx25821/cx25821-video.c 	cx25821_sram_channel_setup(dev, channel, buf->bpl, buf->risc.dma);
dma               219 drivers/media/pci/cx25821/cx25821-video.c 		(unsigned long)buf->risc.dma);
dma               245 drivers/media/pci/cx25821/cx25821-video.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 12);
dma               247 drivers/media/pci/cx25821/cx25821-video.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 12);
dma               257 drivers/media/pci/cx25821/cx25821-video.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               108 drivers/media/pci/cx25821/cx25821.h 	dma_addr_t     dma;
dma               124 drivers/media/pci/cx88/cx88-alsa.c 	cx88_sram_channel_setup(chip->core, audio_ch, buf->bpl, buf->risc.dma);
dma               361 drivers/media/pci/cx88/cx88-alsa.c 				    risc->cpu, risc->dma);
dma               491 drivers/media/pci/cx88/cx88-alsa.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               688 drivers/media/pci/cx88/cx88-blackbird.c 		pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma);
dma               154 drivers/media/pci/cx88/cx88-core.c 	risc->dma = 0;
dma               155 drivers/media/pci/cx88/cx88-core.c 	risc->cpu = pci_zalloc_consistent(pci, risc->size, &risc->dma);
dma               192 drivers/media/pci/cx88/cx88-core.c 	risc->dma = 0;
dma               193 drivers/media/pci/cx88/cx88-core.c 	risc->cpu = pci_zalloc_consistent(pci, risc->size, &risc->dma);
dma               106 drivers/media/pci/cx88/cx88-dvb.c 		pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma);
dma                83 drivers/media/pci/cx88/cx88-mpeg.c 				dev->ts_packet_size, buf->risc.dma);
dma               230 drivers/media/pci/cx88/cx88-mpeg.c 					    risc->cpu, risc->dma);
dma               245 drivers/media/pci/cx88/cx88-mpeg.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8);
dma               247 drivers/media/pci/cx88/cx88-mpeg.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8);
dma               260 drivers/media/pci/cx88/cx88-mpeg.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma                59 drivers/media/pci/cx88/cx88-vbi.c 				VBI_LINE_LENGTH, buf->risc.dma);
dma               162 drivers/media/pci/cx88/cx88-vbi.c 		pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma);
dma               175 drivers/media/pci/cx88/cx88-vbi.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8);
dma               177 drivers/media/pci/cx88/cx88-vbi.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8);
dma               188 drivers/media/pci/cx88/cx88-vbi.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               357 drivers/media/pci/cx88/cx88-video.c 				buf->bpl, buf->risc.dma);
dma               485 drivers/media/pci/cx88/cx88-video.c 		(unsigned long)buf->risc.dma);
dma               497 drivers/media/pci/cx88/cx88-video.c 		pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma);
dma               510 drivers/media/pci/cx88/cx88-video.c 	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8);
dma               512 drivers/media/pci/cx88/cx88-video.c 	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8);
dma               523 drivers/media/pci/cx88/cx88-video.c 		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
dma               309 drivers/media/pci/cx88/cx88.h 	dma_addr_t     dma;
dma               135 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma = io->dma;
dma               139 drivers/media/pci/ddbridge/ddbridge-core.c 	if (!dma)
dma               141 drivers/media/pci/ddbridge/ddbridge-core.c 	for (i = 0; i < dma->num; i++) {
dma               142 drivers/media/pci/ddbridge/ddbridge-core.c 		mem = dma->pbuf[i];
dma               143 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, mem & 0xffffffff, dma->bufregs + i * 8);
dma               144 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, mem >> 32, dma->bufregs + i * 8 + 4);
dma               146 drivers/media/pci/ddbridge/ddbridge-core.c 	dma->bufval = ((dma->div & 0x0f) << 16) |
dma               147 drivers/media/pci/ddbridge/ddbridge-core.c 		((dma->num & 0x1f) << 11) |
dma               148 drivers/media/pci/ddbridge/ddbridge-core.c 		((dma->size >> 7) & 0x7ff);
dma               194 drivers/media/pci/ddbridge/ddbridge-core.c 	if (port->output->dma->running) {
dma               210 drivers/media/pci/ddbridge/ddbridge-core.c 						 oredi->dma, iredo->dma);
dma               253 drivers/media/pci/ddbridge/ddbridge-core.c 	if (port->output->dma->running || input->dma->running) {
dma               269 drivers/media/pci/ddbridge/ddbridge-core.c 	ddb_redirect_dma(input->port->dev, input->dma, port->output->dma);
dma               278 drivers/media/pci/ddbridge/ddbridge-core.c static void dma_free(struct pci_dev *pdev, struct ddb_dma *dma, int dir)
dma               282 drivers/media/pci/ddbridge/ddbridge-core.c 	if (!dma)
dma               284 drivers/media/pci/ddbridge/ddbridge-core.c 	for (i = 0; i < dma->num; i++) {
dma               285 drivers/media/pci/ddbridge/ddbridge-core.c 		if (dma->vbuf[i]) {
dma               287 drivers/media/pci/ddbridge/ddbridge-core.c 				dma_unmap_single(&pdev->dev, dma->pbuf[i],
dma               288 drivers/media/pci/ddbridge/ddbridge-core.c 						 dma->size,
dma               291 drivers/media/pci/ddbridge/ddbridge-core.c 				kfree(dma->vbuf[i]);
dma               292 drivers/media/pci/ddbridge/ddbridge-core.c 				dma->vbuf[i] = NULL;
dma               294 drivers/media/pci/ddbridge/ddbridge-core.c 				dma_free_coherent(&pdev->dev, dma->size,
dma               295 drivers/media/pci/ddbridge/ddbridge-core.c 						  dma->vbuf[i], dma->pbuf[i]);
dma               298 drivers/media/pci/ddbridge/ddbridge-core.c 			dma->vbuf[i] = NULL;
dma               303 drivers/media/pci/ddbridge/ddbridge-core.c static int dma_alloc(struct pci_dev *pdev, struct ddb_dma *dma, int dir)
dma               307 drivers/media/pci/ddbridge/ddbridge-core.c 	if (!dma)
dma               309 drivers/media/pci/ddbridge/ddbridge-core.c 	for (i = 0; i < dma->num; i++) {
dma               311 drivers/media/pci/ddbridge/ddbridge-core.c 			dma->vbuf[i] = kmalloc(dma->size, __GFP_RETRY_MAYFAIL);
dma               312 drivers/media/pci/ddbridge/ddbridge-core.c 			if (!dma->vbuf[i])
dma               314 drivers/media/pci/ddbridge/ddbridge-core.c 			dma->pbuf[i] = dma_map_single(&pdev->dev,
dma               315 drivers/media/pci/ddbridge/ddbridge-core.c 						      dma->vbuf[i],
dma               316 drivers/media/pci/ddbridge/ddbridge-core.c 						      dma->size,
dma               319 drivers/media/pci/ddbridge/ddbridge-core.c 			if (dma_mapping_error(&pdev->dev, dma->pbuf[i])) {
dma               320 drivers/media/pci/ddbridge/ddbridge-core.c 				kfree(dma->vbuf[i]);
dma               321 drivers/media/pci/ddbridge/ddbridge-core.c 				dma->vbuf[i] = NULL;
dma               325 drivers/media/pci/ddbridge/ddbridge-core.c 			dma->vbuf[i] = dma_alloc_coherent(&pdev->dev,
dma               326 drivers/media/pci/ddbridge/ddbridge-core.c 							  dma->size,
dma               327 drivers/media/pci/ddbridge/ddbridge-core.c 							  &dma->pbuf[i],
dma               329 drivers/media/pci/ddbridge/ddbridge-core.c 			if (!dma->vbuf[i])
dma               345 drivers/media/pci/ddbridge/ddbridge-core.c 			if (port->input[0]->dma)
dma               346 drivers/media/pci/ddbridge/ddbridge-core.c 				if (dma_alloc(dev->pdev, port->input[0]->dma, 0)
dma               349 drivers/media/pci/ddbridge/ddbridge-core.c 			if (port->input[1]->dma)
dma               350 drivers/media/pci/ddbridge/ddbridge-core.c 				if (dma_alloc(dev->pdev, port->input[1]->dma, 0)
dma               356 drivers/media/pci/ddbridge/ddbridge-core.c 			if (port->input[0]->dma)
dma               357 drivers/media/pci/ddbridge/ddbridge-core.c 				if (dma_alloc(dev->pdev, port->input[0]->dma, 0)
dma               360 drivers/media/pci/ddbridge/ddbridge-core.c 			if (port->output->dma)
dma               361 drivers/media/pci/ddbridge/ddbridge-core.c 				if (dma_alloc(dev->pdev, port->output->dma, 1)
dma               381 drivers/media/pci/ddbridge/ddbridge-core.c 		if (port->input[0] && port->input[0]->dma)
dma               382 drivers/media/pci/ddbridge/ddbridge-core.c 			dma_free(dev->pdev, port->input[0]->dma, 0);
dma               383 drivers/media/pci/ddbridge/ddbridge-core.c 		if (port->input[1] && port->input[1]->dma)
dma               384 drivers/media/pci/ddbridge/ddbridge-core.c 			dma_free(dev->pdev, port->input[1]->dma, 0);
dma               385 drivers/media/pci/ddbridge/ddbridge-core.c 		if (port->output && port->output->dma)
dma               386 drivers/media/pci/ddbridge/ddbridge-core.c 			dma_free(dev->pdev, port->output->dma, 1);
dma               462 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_irq(&output->dma->lock);
dma               463 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->cbuf = 0;
dma               464 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->coff = 0;
dma               465 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->stat = 0;
dma               466 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 0, DMA_BUFFER_CONTROL(output->dma));
dma               479 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, output->dma->bufval,
dma               480 drivers/media/pci/ddbridge/ddbridge-core.c 		  DMA_BUFFER_SIZE(output->dma));
dma               481 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 0, DMA_BUFFER_ACK(output->dma));
dma               483 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 7, DMA_BUFFER_CONTROL(output->dma));
dma               487 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->running = 1;
dma               488 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_unlock_irq(&output->dma->lock);
dma               495 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_irq(&output->dma->lock);
dma               499 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 0, DMA_BUFFER_CONTROL(output->dma));
dma               500 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->running = 0;
dma               501 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_unlock_irq(&output->dma->lock);
dma               509 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_irq(&input->dma->lock);
dma               513 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 0, DMA_BUFFER_CONTROL(input->dma));
dma               514 drivers/media/pci/ddbridge/ddbridge-core.c 	input->dma->running = 0;
dma               515 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_unlock_irq(&input->dma->lock);
dma               522 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_irq(&input->dma->lock);
dma               523 drivers/media/pci/ddbridge/ddbridge-core.c 	input->dma->cbuf = 0;
dma               524 drivers/media/pci/ddbridge/ddbridge-core.c 	input->dma->coff = 0;
dma               525 drivers/media/pci/ddbridge/ddbridge-core.c 	input->dma->stat = 0;
dma               526 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 0, DMA_BUFFER_CONTROL(input->dma));
dma               532 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, input->dma->bufval,
dma               533 drivers/media/pci/ddbridge/ddbridge-core.c 		  DMA_BUFFER_SIZE(input->dma));
dma               534 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 0, DMA_BUFFER_ACK(input->dma));
dma               536 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(dev, 3, DMA_BUFFER_CONTROL(input->dma));
dma               543 drivers/media/pci/ddbridge/ddbridge-core.c 	input->dma->running = 1;
dma               544 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_unlock_irq(&input->dma->lock);
dma               581 drivers/media/pci/ddbridge/ddbridge-core.c 	u32 idx, off, stat = output->dma->stat;
dma               587 drivers/media/pci/ddbridge/ddbridge-core.c 	if (output->dma->cbuf != idx) {
dma               588 drivers/media/pci/ddbridge/ddbridge-core.c 		if ((((output->dma->cbuf + 1) % output->dma->num) == idx) &&
dma               589 drivers/media/pci/ddbridge/ddbridge-core.c 		    (output->dma->size - output->dma->coff <= (2 * 188)))
dma               593 drivers/media/pci/ddbridge/ddbridge-core.c 	diff = off - output->dma->coff;
dma               603 drivers/media/pci/ddbridge/ddbridge-core.c 	u32 idx, off, stat = output->dma->stat;
dma               610 drivers/media/pci/ddbridge/ddbridge-core.c 		len = output->dma->size - output->dma->coff;
dma               611 drivers/media/pci/ddbridge/ddbridge-core.c 		if ((((output->dma->cbuf + 1) % output->dma->num) == idx) &&
dma               617 drivers/media/pci/ddbridge/ddbridge-core.c 		if (output->dma->cbuf == idx) {
dma               618 drivers/media/pci/ddbridge/ddbridge-core.c 			if (off > output->dma->coff) {
dma               619 drivers/media/pci/ddbridge/ddbridge-core.c 				len = off - output->dma->coff;
dma               628 drivers/media/pci/ddbridge/ddbridge-core.c 		if (copy_from_user(output->dma->vbuf[output->dma->cbuf] +
dma               629 drivers/media/pci/ddbridge/ddbridge-core.c 				   output->dma->coff,
dma               635 drivers/media/pci/ddbridge/ddbridge-core.c 				output->dma->pbuf[output->dma->cbuf],
dma               636 drivers/media/pci/ddbridge/ddbridge-core.c 				output->dma->size, DMA_TO_DEVICE);
dma               639 drivers/media/pci/ddbridge/ddbridge-core.c 		output->dma->coff += len;
dma               640 drivers/media/pci/ddbridge/ddbridge-core.c 		if (output->dma->coff == output->dma->size) {
dma               641 drivers/media/pci/ddbridge/ddbridge-core.c 			output->dma->coff = 0;
dma               642 drivers/media/pci/ddbridge/ddbridge-core.c 			output->dma->cbuf = ((output->dma->cbuf + 1) %
dma               643 drivers/media/pci/ddbridge/ddbridge-core.c 					     output->dma->num);
dma               646 drivers/media/pci/ddbridge/ddbridge-core.c 			  (output->dma->cbuf << 11) |
dma               647 drivers/media/pci/ddbridge/ddbridge-core.c 			  (output->dma->coff >> 7),
dma               648 drivers/media/pci/ddbridge/ddbridge-core.c 			  DMA_BUFFER_ACK(output->dma));
dma               656 drivers/media/pci/ddbridge/ddbridge-core.c 	u32 idx, off, stat = input->dma->stat;
dma               657 drivers/media/pci/ddbridge/ddbridge-core.c 	u32 ctrl = ddbreadl(dev, DMA_BUFFER_CONTROL(input->dma));
dma               664 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, stat, DMA_BUFFER_ACK(input->dma));
dma               667 drivers/media/pci/ddbridge/ddbridge-core.c 	if (input->dma->cbuf != idx)
dma               677 drivers/media/pci/ddbridge/ddbridge-core.c 	u32 idx, free, stat = input->dma->stat;
dma               683 drivers/media/pci/ddbridge/ddbridge-core.c 		if (input->dma->cbuf == idx)
dma               685 drivers/media/pci/ddbridge/ddbridge-core.c 		free = input->dma->size - input->dma->coff;
dma               691 drivers/media/pci/ddbridge/ddbridge-core.c 				input->dma->pbuf[input->dma->cbuf],
dma               692 drivers/media/pci/ddbridge/ddbridge-core.c 				input->dma->size, DMA_FROM_DEVICE);
dma               693 drivers/media/pci/ddbridge/ddbridge-core.c 		ret = copy_to_user(buf, input->dma->vbuf[input->dma->cbuf] +
dma               694 drivers/media/pci/ddbridge/ddbridge-core.c 				   input->dma->coff, free);
dma               697 drivers/media/pci/ddbridge/ddbridge-core.c 		input->dma->coff += free;
dma               698 drivers/media/pci/ddbridge/ddbridge-core.c 		if (input->dma->coff == input->dma->size) {
dma               699 drivers/media/pci/ddbridge/ddbridge-core.c 			input->dma->coff = 0;
dma               700 drivers/media/pci/ddbridge/ddbridge-core.c 			input->dma->cbuf = (input->dma->cbuf + 1) %
dma               701 drivers/media/pci/ddbridge/ddbridge-core.c 				input->dma->num;
dma               706 drivers/media/pci/ddbridge/ddbridge-core.c 			  (input->dma->cbuf << 11) | (input->dma->coff >> 7),
dma               707 drivers/media/pci/ddbridge/ddbridge-core.c 			  DMA_BUFFER_ACK(input->dma));
dma               731 drivers/media/pci/ddbridge/ddbridge-core.c 				    output->dma->wq,
dma               761 drivers/media/pci/ddbridge/ddbridge-core.c 				    input->dma->wq,
dma               782 drivers/media/pci/ddbridge/ddbridge-core.c 	poll_wait(file, &input->dma->wq, wait);
dma               783 drivers/media/pci/ddbridge/ddbridge-core.c 	poll_wait(file, &output->dma->wq, wait);
dma              2122 drivers/media/pci/ddbridge/ddbridge-core.c 		  input->dma->stat, DMA_BUFFER_ACK(output->dma));
dma              2123 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->cbuf = (input->dma->stat >> 11) & 0x1f;
dma              2124 drivers/media/pci/ddbridge/ddbridge-core.c 	output->dma->coff = (input->dma->stat & 0x7ff) << 7;
dma              2131 drivers/media/pci/ddbridge/ddbridge-core.c 		  output->dma->stat, DMA_BUFFER_ACK(input->dma));
dma              2138 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma, *dma2;
dma              2142 drivers/media/pci/ddbridge/ddbridge-core.c 	dma = input->dma;
dma              2143 drivers/media/pci/ddbridge/ddbridge-core.c 	dma2 = input->dma;
dma              2149 drivers/media/pci/ddbridge/ddbridge-core.c 		dma2 = input->redo->dma;
dma              2152 drivers/media/pci/ddbridge/ddbridge-core.c 	while (dma->cbuf != ((dma->stat >> 11) & 0x1f) ||
dma              2153 drivers/media/pci/ddbridge/ddbridge-core.c 	       (4 & dma->ctrl)) {
dma              2154 drivers/media/pci/ddbridge/ddbridge-core.c 		if (4 & dma->ctrl) {
dma              2159 drivers/media/pci/ddbridge/ddbridge-core.c 			dma_sync_single_for_cpu(dev->dev, dma2->pbuf[dma->cbuf],
dma              2162 drivers/media/pci/ddbridge/ddbridge-core.c 					 dma2->vbuf[dma->cbuf],
dma              2164 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->cbuf = (dma->cbuf + 1) % dma2->num;
dma              2166 drivers/media/pci/ddbridge/ddbridge-core.c 			ddbwritel(dev, (dma->cbuf << 11),
dma              2167 drivers/media/pci/ddbridge/ddbridge-core.c 				  DMA_BUFFER_ACK(dma));
dma              2168 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->stat = safe_ddbreadl(dev, DMA_BUFFER_CURRENT(dma));
dma              2169 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->ctrl = safe_ddbreadl(dev, DMA_BUFFER_CONTROL(dma));
dma              2175 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma = container_of(work, struct ddb_dma, work);
dma              2176 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_input *input = (struct ddb_input *)dma->io;
dma              2180 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_irqsave(&dma->lock, flags);
dma              2181 drivers/media/pci/ddbridge/ddbridge-core.c 	if (!dma->running) {
dma              2182 drivers/media/pci/ddbridge/ddbridge-core.c 		spin_unlock_irqrestore(&dma->lock, flags);
dma              2185 drivers/media/pci/ddbridge/ddbridge-core.c 	dma->stat = ddbreadl(dev, DMA_BUFFER_CURRENT(dma));
dma              2186 drivers/media/pci/ddbridge/ddbridge-core.c 	dma->ctrl = ddbreadl(dev, DMA_BUFFER_CONTROL(dma));
dma              2192 drivers/media/pci/ddbridge/ddbridge-core.c 	wake_up(&dma->wq);
dma              2193 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_unlock_irqrestore(&dma->lock, flags);
dma              2199 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma = input->dma;
dma              2201 drivers/media/pci/ddbridge/ddbridge-core.c 	queue_work(ddb_wq, &dma->work);
dma              2206 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma = container_of(work, struct ddb_dma, work);
dma              2207 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_output *output = (struct ddb_output *)dma->io;
dma              2211 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_irqsave(&dma->lock, flags);
dma              2212 drivers/media/pci/ddbridge/ddbridge-core.c 	if (!dma->running)
dma              2214 drivers/media/pci/ddbridge/ddbridge-core.c 	dma->stat = ddbreadl(dev, DMA_BUFFER_CURRENT(dma));
dma              2215 drivers/media/pci/ddbridge/ddbridge-core.c 	dma->ctrl = ddbreadl(dev, DMA_BUFFER_CONTROL(dma));
dma              2218 drivers/media/pci/ddbridge/ddbridge-core.c 	wake_up(&dma->wq);
dma              2220 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_unlock_irqrestore(&dma->lock, flags);
dma              2226 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma = output->dma;
dma              2228 drivers/media/pci/ddbridge/ddbridge-core.c 	queue_work(ddb_wq, &dma->work);
dma              2251 drivers/media/pci/ddbridge/ddbridge-core.c 	struct ddb_dma *dma;
dma              2254 drivers/media/pci/ddbridge/ddbridge-core.c 	dma = out ? &io->port->dev->odma[nr] : &io->port->dev->idma[nr];
dma              2255 drivers/media/pci/ddbridge/ddbridge-core.c 	io->dma = dma;
dma              2256 drivers/media/pci/ddbridge/ddbridge-core.c 	dma->io = io;
dma              2258 drivers/media/pci/ddbridge/ddbridge-core.c 	spin_lock_init(&dma->lock);
dma              2259 drivers/media/pci/ddbridge/ddbridge-core.c 	init_waitqueue_head(&dma->wq);
dma              2261 drivers/media/pci/ddbridge/ddbridge-core.c 		INIT_WORK(&dma->work, output_work);
dma              2262 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->regs = rm->odma->base + rm->odma->size * nr;
dma              2263 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->bufregs = rm->odma_buf->base + rm->odma_buf->size * nr;
dma              2264 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->num = dma_buf_num;
dma              2265 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->size = dma_buf_size * 128 * 47;
dma              2266 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->div = 1;
dma              2268 drivers/media/pci/ddbridge/ddbridge-core.c 		INIT_WORK(&dma->work, input_work);
dma              2269 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->regs = rm->idma->base + rm->idma->size * nr;
dma              2270 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->bufregs = rm->idma_buf->base + rm->idma_buf->size * nr;
dma              2271 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->num = dma_buf_num;
dma              2272 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->size = dma_buf_size * 128 * 47;
dma              2273 drivers/media/pci/ddbridge/ddbridge-core.c 		dma->div = 1;
dma              2275 drivers/media/pci/ddbridge/ddbridge-core.c 	ddbwritel(io->port->dev, 0, DMA_BUFFER_ACK(dma));
dma              2277 drivers/media/pci/ddbridge/ddbridge-core.c 		io->port->lnr, io->nr, nr, dma->regs, dma->bufregs);
dma              2462 drivers/media/pci/ddbridge/ddbridge-core.c 		if (port->input[0] && port->input[0]->dma)
dma              2463 drivers/media/pci/ddbridge/ddbridge-core.c 			cancel_work_sync(&port->input[0]->dma->work);
dma              2464 drivers/media/pci/ddbridge/ddbridge-core.c 		if (port->input[1] && port->input[1]->dma)
dma              2465 drivers/media/pci/ddbridge/ddbridge-core.c 			cancel_work_sync(&port->input[1]->dma->work);
dma              2466 drivers/media/pci/ddbridge/ddbridge-core.c 		if (port->output && port->output->dma)
dma              2467 drivers/media/pci/ddbridge/ddbridge-core.c 			cancel_work_sync(&port->output->dma->work);
dma               200 drivers/media/pci/ddbridge/ddbridge.h 	struct ddb_dma        *dma;
dma               129 drivers/media/pci/intel/ipu3/ipu3-cio2.h #define CIO2_REG_INT_EN_IOS(dma)	(1 << (((dma) >> 1) + 12))
dma               134 drivers/media/pci/intel/ipu3/ipu3-cio2.h #define CIO2_INT_IOC(dma)	(1 << ((dma) < 4 ? (dma) : ((dma) >> 1) + 2))
dma               137 drivers/media/pci/intel/ipu3/ipu3-cio2.h #define CIO2_INT_IOS_IOLN(dma)		(1 << (((dma) >> 1) + 12))
dma               341 drivers/media/pci/ivtv/ivtv-driver.h 	int dma;			/* can be PCI_DMA_TODEVICE, PCI_DMA_FROMDEVICE or PCI_DMA_NONE */
dma               191 drivers/media/pci/ivtv/ivtv-queue.c 		s->dma != PCI_DMA_NONE ? "DMA " : "",
dma               241 drivers/media/pci/ivtv/ivtv-queue.c 				buf->buf, s->buf_size + 256, s->dma);
dma               264 drivers/media/pci/ivtv/ivtv-queue.c 				s->buf_size + 256, s->dma);
dma                20 drivers/media/pci/ivtv/ivtv-queue.h 	return s->dma == PCI_DMA_NONE || (SLICED_VBI_PIO && s->type == IVTV_ENC_STREAM_TYPE_VBI);
dma                27 drivers/media/pci/ivtv/ivtv-queue.h 	return s->dma == PCI_DMA_NONE ||
dma                33 drivers/media/pci/ivtv/ivtv-queue.h 	return s->dma != PCI_DMA_NONE;
dma                45 drivers/media/pci/ivtv/ivtv-queue.h 				s->buf_size + 256, s->dma);
dma                52 drivers/media/pci/ivtv/ivtv-queue.h 				s->buf_size + 256, s->dma);
dma                96 drivers/media/pci/ivtv/ivtv-streams.c 	int dma, pio;
dma               182 drivers/media/pci/ivtv/ivtv-streams.c 		s->dma = PCI_DMA_NONE;
dma               184 drivers/media/pci/ivtv/ivtv-streams.c 		s->dma = ivtv_stream_info[type].dma;
dma               220 drivers/media/pci/ivtv/ivtv-streams.c 	if (ivtv_stream_info[type].dma != PCI_DMA_NONE &&
dma                25 drivers/media/pci/ivtv/ivtv-udma.c int ivtv_udma_fill_sg_list (struct ivtv_user_dma *dma, struct ivtv_dma_page_info *dma_page, int map_offset)
dma                40 drivers/media/pci/ivtv/ivtv-udma.c 		if (PageHighMem(dma->map[map_offset])) {
dma                43 drivers/media/pci/ivtv/ivtv-udma.c 			if (dma->bouncemap[map_offset] == NULL)
dma                44 drivers/media/pci/ivtv/ivtv-udma.c 				dma->bouncemap[map_offset] = alloc_page(GFP_KERNEL);
dma                45 drivers/media/pci/ivtv/ivtv-udma.c 			if (dma->bouncemap[map_offset] == NULL)
dma                48 drivers/media/pci/ivtv/ivtv-udma.c 			src = kmap_atomic(dma->map[map_offset]) + offset;
dma                49 drivers/media/pci/ivtv/ivtv-udma.c 			memcpy(page_address(dma->bouncemap[map_offset]) + offset, src, len);
dma                52 drivers/media/pci/ivtv/ivtv-udma.c 			sg_set_page(&dma->SGlist[map_offset], dma->bouncemap[map_offset], len, offset);
dma                55 drivers/media/pci/ivtv/ivtv-udma.c 			sg_set_page(&dma->SGlist[map_offset], dma->map[map_offset], len, offset);
dma                63 drivers/media/pci/ivtv/ivtv-udma.c void ivtv_udma_fill_sg_array (struct ivtv_user_dma *dma, u32 buffer_offset, u32 buffer_offset_2, u32 split) {
dma                67 drivers/media/pci/ivtv/ivtv-udma.c 	for_each_sg(dma->SGlist, sg, dma->SG_length, i) {
dma                68 drivers/media/pci/ivtv/ivtv-udma.c 		dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg));
dma                69 drivers/media/pci/ivtv/ivtv-udma.c 		dma->SGarray[i].src = cpu_to_le32(sg_dma_address(sg));
dma                70 drivers/media/pci/ivtv/ivtv-udma.c 		dma->SGarray[i].dst = cpu_to_le32(buffer_offset);
dma                94 drivers/media/pci/ivtv/ivtv-udma.c 	struct ivtv_user_dma *dma = &itv->udma;
dma               100 drivers/media/pci/ivtv/ivtv-udma.c 	if (dma->SG_length || dma->page_count) {
dma               102 drivers/media/pci/ivtv/ivtv-udma.c 			   dma->SG_length, dma->page_count);
dma               116 drivers/media/pci/ivtv/ivtv-udma.c 			dma->map, FOLL_FORCE);
dma               123 drivers/media/pci/ivtv/ivtv-udma.c 				put_page(dma->map[i]);
dma               129 drivers/media/pci/ivtv/ivtv-udma.c 	dma->page_count = user_dma.page_count;
dma               132 drivers/media/pci/ivtv/ivtv-udma.c 	if (ivtv_udma_fill_sg_list(dma, &user_dma, 0) < 0) {
dma               133 drivers/media/pci/ivtv/ivtv-udma.c 		for (i = 0; i < dma->page_count; i++) {
dma               134 drivers/media/pci/ivtv/ivtv-udma.c 			put_page(dma->map[i]);
dma               136 drivers/media/pci/ivtv/ivtv-udma.c 		dma->page_count = 0;
dma               141 drivers/media/pci/ivtv/ivtv-udma.c 	dma->SG_length = pci_map_sg(itv->pdev, dma->SGlist, dma->page_count, PCI_DMA_TODEVICE);
dma               144 drivers/media/pci/ivtv/ivtv-udma.c 	ivtv_udma_fill_sg_array (dma, ivtv_dest_addr, 0, -1);
dma               147 drivers/media/pci/ivtv/ivtv-udma.c 	dma->SGarray[dma->SG_length - 1].size |= cpu_to_le32(0x80000000);
dma               150 drivers/media/pci/ivtv/ivtv-udma.c 	return dma->page_count;
dma               155 drivers/media/pci/ivtv/ivtv-udma.c 	struct ivtv_user_dma *dma = &itv->udma;
dma               161 drivers/media/pci/ivtv/ivtv-udma.c 	if (dma->page_count == 0)
dma               165 drivers/media/pci/ivtv/ivtv-udma.c 	if (dma->SG_length) {
dma               166 drivers/media/pci/ivtv/ivtv-udma.c 		pci_unmap_sg(itv->pdev, dma->SGlist, dma->page_count, PCI_DMA_TODEVICE);
dma               167 drivers/media/pci/ivtv/ivtv-udma.c 		dma->SG_length = 0;
dma               173 drivers/media/pci/ivtv/ivtv-udma.c 	for (i = 0; i < dma->page_count; i++) {
dma               174 drivers/media/pci/ivtv/ivtv-udma.c 		put_page(dma->map[i]);
dma               176 drivers/media/pci/ivtv/ivtv-udma.c 	dma->page_count = 0;
dma                14 drivers/media/pci/ivtv/ivtv-udma.h int ivtv_udma_fill_sg_list(struct ivtv_user_dma *dma, struct ivtv_dma_page_info *dma_page, int map_offset);
dma                15 drivers/media/pci/ivtv/ivtv-udma.h void ivtv_udma_fill_sg_array(struct ivtv_user_dma *dma, u32 buffer_offset, u32 buffer_offset_2, u32 split);
dma                25 drivers/media/pci/ivtv/ivtv-yuv.c static int ivtv_yuv_prep_user_dma(struct ivtv *itv, struct ivtv_user_dma *dma,
dma                55 drivers/media/pci/ivtv/ivtv-yuv.c 	if (dma->SG_length || dma->page_count) {
dma                58 drivers/media/pci/ivtv/ivtv-yuv.c 		     dma->SG_length, dma->page_count);
dma                67 drivers/media/pci/ivtv/ivtv-yuv.c 			y_dma.page_count, &dma->map[0], FOLL_FORCE);
dma                71 drivers/media/pci/ivtv/ivtv-yuv.c 				uv_dma.page_count, &dma->map[y_pages],
dma                85 drivers/media/pci/ivtv/ivtv-yuv.c 					put_page(dma->map[y_pages + i]);
dma                97 drivers/media/pci/ivtv/ivtv-yuv.c 				put_page(dma->map[i]);
dma               110 drivers/media/pci/ivtv/ivtv-yuv.c 	dma->page_count = y_pages + uv_pages;
dma               113 drivers/media/pci/ivtv/ivtv-yuv.c 	if (ivtv_udma_fill_sg_list (dma, &uv_dma, ivtv_udma_fill_sg_list (dma, &y_dma, 0)) < 0) {
dma               115 drivers/media/pci/ivtv/ivtv-yuv.c 		for (i = 0; i < dma->page_count; i++) {
dma               116 drivers/media/pci/ivtv/ivtv-yuv.c 			put_page(dma->map[i]);
dma               118 drivers/media/pci/ivtv/ivtv-yuv.c 		dma->page_count = 0;
dma               121 drivers/media/pci/ivtv/ivtv-yuv.c 	dma->SG_length = pci_map_sg(itv->pdev, dma->SGlist, dma->page_count, PCI_DMA_TODEVICE);
dma               124 drivers/media/pci/ivtv/ivtv-yuv.c 	ivtv_udma_fill_sg_array(dma, y_buffer_offset, uv_buffer_offset, y_size);
dma               128 drivers/media/pci/ivtv/ivtv-yuv.c 		dma->SGarray[dma->SG_length].size = cpu_to_le32(720*16);
dma               129 drivers/media/pci/ivtv/ivtv-yuv.c 		dma->SGarray[dma->SG_length].src = cpu_to_le32(yi->blanking_dmaptr);
dma               130 drivers/media/pci/ivtv/ivtv-yuv.c 		dma->SGarray[dma->SG_length].dst = cpu_to_le32(IVTV_DECODER_OFFSET + yuv_offset[frame]);
dma               131 drivers/media/pci/ivtv/ivtv-yuv.c 		dma->SG_length++;
dma               135 drivers/media/pci/ivtv/ivtv-yuv.c 	dma->SGarray[dma->SG_length - 1].size |= cpu_to_le32(0x80000000);
dma               125 drivers/media/pci/meye/meye.c 		dma_addr_t dma;
dma               128 drivers/media/pci/meye/meye.c 							  &dma,
dma               134 drivers/media/pci/meye/meye.c 				dma = (dma_addr_t) *pt;
dma               137 drivers/media/pci/meye/meye.c 						  meye.mchip_ptable[j], dma);
dma               148 drivers/media/pci/meye/meye.c 		*pt = (u32) dma;
dma               161 drivers/media/pci/meye/meye.c 		dma_addr_t dma = (dma_addr_t) *pt;
dma               165 drivers/media/pci/meye/meye.c 					  meye.mchip_ptable[i], dma);
dma               119 drivers/media/pci/netup_unidvb/netup_unidvb.h 	struct netup_dma		dma[2];
dma               112 drivers/media/pci/netup_unidvb/netup_unidvb_core.c static void netup_unidvb_queue_cleanup(struct netup_dma *dma);
dma               145 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = priv;
dma               150 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	ndev = dma->ndev;
dma               152 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		__func__, dma->num, is_dvb_tc);
dma               154 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	mask = (dma->num == 0) ? GPIO_RFA_CTL : GPIO_RFB_CTL;
dma               189 drivers/media/pci/netup_unidvb/netup_unidvb_core.c static void netup_unidvb_dma_enable(struct netup_dma *dma, int enable)
dma               191 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	u32 irq_mask = (dma->num == 0 ?
dma               194 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev,
dma               195 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		"%s(): DMA%d enable %d\n", __func__, dma->num, enable);
dma               197 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		writel(BIT_DMA_RUN, &dma->regs->ctrlstat_set);
dma               198 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		writew(irq_mask, dma->ndev->bmmio0 + REG_IMASK_SET);
dma               200 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		writel(BIT_DMA_RUN, &dma->regs->ctrlstat_clear);
dma               201 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		writew(irq_mask, dma->ndev->bmmio0 + REG_IMASK_CLEAR);
dma               205 drivers/media/pci/netup_unidvb/netup_unidvb_core.c static irqreturn_t netup_dma_interrupt(struct netup_dma *dma)
dma               210 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct device *dev = &dma->ndev->pci_dev->dev;
dma               212 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_lock_irqsave(&dma->lock, flags);
dma               213 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	addr_curr = ((u64)readl(&dma->regs->curr_addr_hi) << 32) |
dma               214 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		(u64)readl(&dma->regs->curr_addr_lo) | dma->high_addr;
dma               216 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	writel(BIT_DMA_IRQ, &dma->regs->ctrlstat_clear);
dma               218 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	if (addr_curr < dma->addr_phys ||
dma               219 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 			addr_curr > dma->addr_phys +  dma->ring_buffer_size) {
dma               223 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 				__func__, addr_curr, (u64)dma->addr_phys,
dma               224 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 				(u64)(dma->addr_phys + dma->ring_buffer_size));
dma               228 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	size = (addr_curr >= dma->addr_last) ?
dma               229 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		(u32)(addr_curr - dma->addr_last) :
dma               230 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		(u32)(dma->ring_buffer_size - (dma->addr_last - addr_curr));
dma               231 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	if (dma->data_size != 0) {
dma               233 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 			__func__, dma->data_size);
dma               234 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_size += size;
dma               236 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	if (dma->data_size == 0 || dma->data_size > dma->ring_buffer_size) {
dma               237 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_size = size;
dma               238 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_offset = (u32)(dma->addr_last - dma->addr_phys);
dma               240 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->addr_last = addr_curr;
dma               241 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	queue_work(dma->ndev->wq, &dma->work);
dma               243 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_unlock_irqrestore(&dma->lock, flags);
dma               268 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 			iret = netup_dma_interrupt(&ndev->dma[0]);
dma               270 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 			iret = netup_dma_interrupt(&ndev->dma[1]);
dma               290 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = vb2_get_drv_priv(vq);
dma               292 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev, "%s()\n", __func__);
dma               298 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev, "%s() nbuffers=%d sizes[0]=%d\n",
dma               305 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = vb2_get_drv_priv(vb->vb2_queue);
dma               310 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev, "%s(): buf 0x%p\n", __func__, buf);
dma               318 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = vb2_get_drv_priv(vb->vb2_queue);
dma               323 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev, "%s(): %p\n", __func__, buf);
dma               324 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_lock_irqsave(&dma->lock, flags);
dma               325 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	list_add_tail(&buf->list, &dma->free_buffers);
dma               326 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_unlock_irqrestore(&dma->lock, flags);
dma               327 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	mod_timer(&dma->timeout, jiffies + msecs_to_jiffies(1000));
dma               332 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = vb2_get_drv_priv(q);
dma               334 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev, "%s()\n", __func__);
dma               335 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_dma_enable(dma, 1);
dma               341 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = vb2_get_drv_priv(q);
dma               343 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dev_dbg(&dma->ndev->pci_dev->dev, "%s()\n", __func__);
dma               344 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_dma_enable(dma, 0);
dma               345 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_queue_cleanup(dma);
dma               356 drivers/media/pci/netup_unidvb/netup_unidvb_core.c static int netup_unidvb_queue_init(struct netup_dma *dma,
dma               364 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	vb_queue->drv_priv = dma;
dma               371 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dev_err(&dma->ndev->pci_dev->dev,
dma               419 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		netup_unidvb_queue_init(&ndev->dma[num], &fes[i]->dvb.dvbq);
dma               434 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		horus3a_conf.set_tuner_priv = &ndev->dma[num];
dma               443 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		helene_conf.set_tuner_priv = &ndev->dma[num];
dma               470 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		ascot2e_conf.set_tuner_priv = &ndev->dma[num];
dma               479 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		helene_conf.set_tuner_priv = &ndev->dma[num];
dma               532 drivers/media/pci/netup_unidvb/netup_unidvb_core.c static int netup_unidvb_ring_copy(struct netup_dma *dma,
dma               538 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_unidvb_dev *ndev = dma->ndev;
dma               546 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	if (dma->data_offset + dma->data_size > dma->ring_buffer_size) {
dma               547 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		ring_bytes = dma->ring_buffer_size - dma->data_offset;
dma               550 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		memcpy_fromio(p, (u8 __iomem *)(dma->addr_virt + dma->data_offset), copy_bytes);
dma               554 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_size -= copy_bytes;
dma               555 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_offset += copy_bytes;
dma               556 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		if (dma->data_offset == dma->ring_buffer_size)
dma               557 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 			dma->data_offset = 0;
dma               560 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		ring_bytes = dma->data_size;
dma               563 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		memcpy_fromio(p, (u8 __iomem *)(dma->addr_virt + dma->data_offset), copy_bytes);
dma               565 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_size -= copy_bytes;
dma               566 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->data_offset += copy_bytes;
dma               567 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		if (dma->data_offset == dma->ring_buffer_size)
dma               568 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 			dma->data_offset = 0;
dma               575 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = container_of(work, struct netup_dma, work);
dma               576 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_unidvb_dev *ndev = dma->ndev;
dma               580 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_lock_irqsave(&dma->lock, flags);
dma               581 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	if (dma->data_size == 0) {
dma               586 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	while (dma->data_size > 0) {
dma               587 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		if (list_empty(&dma->free_buffers)) {
dma               592 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		buf = list_first_entry(&dma->free_buffers,
dma               600 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		if (netup_unidvb_ring_copy(dma, buf))
dma               613 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->data_size = 0;
dma               614 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_unlock_irqrestore(&dma->lock, flags);
dma               617 drivers/media/pci/netup_unidvb/netup_unidvb_core.c static void netup_unidvb_queue_cleanup(struct netup_dma *dma)
dma               622 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_lock_irqsave(&dma->lock, flags);
dma               623 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	while (!list_empty(&dma->free_buffers)) {
dma               624 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		buf = list_first_entry(&dma->free_buffers,
dma               629 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_unlock_irqrestore(&dma->lock, flags);
dma               634 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma = from_timer(dma, t, timeout);
dma               635 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_unidvb_dev *ndev = dma->ndev;
dma               638 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_queue_cleanup(dma);
dma               643 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma;
dma               651 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma = &ndev->dma[num];
dma               653 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->num = num;
dma               654 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->ndev = ndev;
dma               655 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	spin_lock_init(&dma->lock);
dma               656 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	INIT_WORK(&dma->work, netup_unidvb_dma_worker);
dma               657 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	INIT_LIST_HEAD(&dma->free_buffers);
dma               658 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	timer_setup(&dma->timeout, netup_unidvb_dma_timeout, 0);
dma               659 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->ring_buffer_size = ndev->dma_size / 2;
dma               660 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->addr_virt = ndev->dma_virt + dma->ring_buffer_size * num;
dma               661 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->addr_phys = (dma_addr_t)((u64)ndev->dma_phys +
dma               662 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->ring_buffer_size * num);
dma               664 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		__func__, num, dma->addr_virt,
dma               665 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		(unsigned long long)dma->addr_phys,
dma               666 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		dma->ring_buffer_size);
dma               667 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	memset_io((u8 __iomem *)dma->addr_virt, 0, dma->ring_buffer_size);
dma               668 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->addr_last = dma->addr_phys;
dma               669 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->high_addr = (u32)(dma->addr_phys & 0xC0000000);
dma               670 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma->regs = (struct netup_dma_regs __iomem *)(num == 0 ?
dma               674 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 		(NETUP_DMA_PACKETS_COUNT << 8) | 188, &dma->regs->size);
dma               675 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	writel((u32)(dma->addr_phys & 0x3FFFFFFF), &dma->regs->start_addr_lo);
dma               676 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	writel(0, &dma->regs->start_addr_hi);
dma               677 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	writel(dma->high_addr, ndev->bmmio0 + 0x1000);
dma               678 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	writel(375000000, &dma->regs->timeout);
dma               680 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	writel(BIT_DMA_IRQ, &dma->regs->ctrlstat_clear);
dma               686 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	struct netup_dma *dma;
dma               691 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	dma = &ndev->dma[num];
dma               692 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_dma_enable(dma, 0);
dma               694 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	cancel_work_sync(&dma->work);
dma               695 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	del_timer(&dma->timeout);
dma               710 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_dma_enable(&ndev->dma[0], 0);
dma               711 drivers/media/pci/netup_unidvb/netup_unidvb_core.c 	netup_unidvb_dma_enable(&ndev->dma[1], 0);
dma               257 drivers/media/pci/saa7134/saa7134-alsa.c 	struct saa7134_dmasound *dma = &dev->dmasound;
dma               261 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->vaddr = vmalloc_32(nr_pages << PAGE_SHIFT);
dma               262 drivers/media/pci/saa7134/saa7134-alsa.c 	if (NULL == dma->vaddr) {
dma               268 drivers/media/pci/saa7134/saa7134-alsa.c 		 dma->vaddr, nr_pages << PAGE_SHIFT);
dma               270 drivers/media/pci/saa7134/saa7134-alsa.c 	memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT);
dma               271 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->nr_pages = nr_pages;
dma               273 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->sglist = vzalloc(array_size(sizeof(*dma->sglist), dma->nr_pages));
dma               274 drivers/media/pci/saa7134/saa7134-alsa.c 	if (NULL == dma->sglist)
dma               277 drivers/media/pci/saa7134/saa7134-alsa.c 	sg_init_table(dma->sglist, dma->nr_pages);
dma               278 drivers/media/pci/saa7134/saa7134-alsa.c 	for (i = 0; i < dma->nr_pages; i++) {
dma               279 drivers/media/pci/saa7134/saa7134-alsa.c 		pg = vmalloc_to_page(dma->vaddr + i * PAGE_SIZE);
dma               282 drivers/media/pci/saa7134/saa7134-alsa.c 		sg_set_page(&dma->sglist[i], pg, PAGE_SIZE, 0);
dma               287 drivers/media/pci/saa7134/saa7134-alsa.c 	vfree(dma->sglist);
dma               288 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->sglist = NULL;
dma               290 drivers/media/pci/saa7134/saa7134-alsa.c 	vfree(dma->vaddr);
dma               291 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->vaddr = NULL;
dma               297 drivers/media/pci/saa7134/saa7134-alsa.c 	struct saa7134_dmasound *dma = &dev->dmasound;
dma               299 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->sglen = dma_map_sg(&dev->pci->dev, dma->sglist,
dma               300 drivers/media/pci/saa7134/saa7134-alsa.c 			dma->nr_pages, PCI_DMA_FROMDEVICE);
dma               302 drivers/media/pci/saa7134/saa7134-alsa.c 	if (0 == dma->sglen) {
dma               311 drivers/media/pci/saa7134/saa7134-alsa.c 	struct saa7134_dmasound *dma = &dev->dmasound;
dma               313 drivers/media/pci/saa7134/saa7134-alsa.c 	if (!dma->sglen)
dma               316 drivers/media/pci/saa7134/saa7134-alsa.c 	dma_unmap_sg(&dev->pci->dev, dma->sglist, dma->sglen, PCI_DMA_FROMDEVICE);
dma               317 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->sglen = 0;
dma               321 drivers/media/pci/saa7134/saa7134-alsa.c static int saa7134_alsa_dma_free(struct saa7134_dmasound *dma)
dma               323 drivers/media/pci/saa7134/saa7134-alsa.c 	vfree(dma->sglist);
dma               324 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->sglist = NULL;
dma               325 drivers/media/pci/saa7134/saa7134-alsa.c 	vfree(dma->vaddr);
dma               326 drivers/media/pci/saa7134/saa7134-alsa.c 	dma->vaddr = NULL;
dma               559 drivers/media/pci/saa7134/saa7134-alsa.c 		(dev->dmasound.pt.dma >> 12);
dma               212 drivers/media/pci/saa7134/saa7134-core.c 	struct sg_table *dma = vb2_dma_sg_plane_desc(&buf->vb2.vb2_buf, 0);
dma               215 drivers/media/pci/saa7134/saa7134-core.c 	base += dma->sgl[0].offset;
dma               231 drivers/media/pci/saa7134/saa7134-core.c 	pt->dma  = dma_addr;
dma               257 drivers/media/pci/saa7134/saa7134-core.c 	pci_free_consistent(pci, pt->size, pt->cpu, pt->dma);
dma                86 drivers/media/pci/saa7134/saa7134-ts.c 	struct sg_table *dma = vb2_dma_sg_plane_desc(vb2, 0);
dma               101 drivers/media/pci/saa7134/saa7134-ts.c 	return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents,
dma               269 drivers/media/pci/saa7134/saa7134-ts.c 					  (dev->ts_q.pt.dma >> 12));
dma                88 drivers/media/pci/saa7134/saa7134-vbi.c 		(dmaq->pt.dma >> 12);
dma               111 drivers/media/pci/saa7134/saa7134-vbi.c 	struct sg_table *dma = vb2_dma_sg_plane_desc(vb2, 0);
dma               114 drivers/media/pci/saa7134/saa7134-vbi.c 	if (dma->sgl->offset) {
dma               124 drivers/media/pci/saa7134/saa7134-vbi.c 	return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents,
dma               847 drivers/media/pci/saa7134/saa7134-video.c 		(dmaq->pt.dma >> 12);
dma               919 drivers/media/pci/saa7134/saa7134-video.c 	struct sg_table *dma = vb2_dma_sg_plane_desc(vb2, 0);
dma               922 drivers/media/pci/saa7134/saa7134-video.c 	if (dma->sgl->offset) {
dma               933 drivers/media/pci/saa7134/saa7134-video.c 	return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents,
dma               450 drivers/media/pci/saa7134/saa7134.h 	dma_addr_t                 dma;
dma                63 drivers/media/pci/saa7164/saa7164-buffer.c 		buf->cpu, (long long)buf->dma, buf->pci_size);
dma               107 drivers/media/pci/saa7164/saa7164-buffer.c 		&buf->dma);
dma               124 drivers/media/pci/saa7164/saa7164-buffer.c 		buf->cpu, (long)buf->dma, buf->pci_size);
dma               131 drivers/media/pci/saa7164/saa7164-buffer.c 		*(buf->pt_cpu + i) = buf->dma + (i * 0x1000); /* TODO */
dma               140 drivers/media/pci/saa7164/saa7164-buffer.c 	pci_free_consistent(port->dev->pci, buf->pci_size, buf->cpu, buf->dma);
dma               163 drivers/media/pci/saa7164/saa7164-buffer.c 	pci_free_consistent(dev->pci, buf->pci_size, buf->cpu, buf->dma);
dma               308 drivers/media/pci/saa7164/saa7164.h 	dma_addr_t dma;	/* Physical address */
dma               276 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c static int enc_get_mpeg_dma(struct solo_dev *solo_dev, dma_addr_t dma,
dma               286 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 		return solo_p2m_dma_t(solo_dev, 0, dma,
dma               292 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 	ret = solo_p2m_dma_t(solo_dev, 0, dma,
dma               298 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 			     dma + SOLO_MP4E_EXT_SIZE(solo_dev) - off,
dma               324 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 		dma_addr_t dma;
dma               329 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 		dma = sg_dma_address(sg);
dma               336 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 			dma += skip;
dma               345 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 			solo_p2m_fill_desc(desc, 0, dma, base + off,
dma               354 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 			ret = solo_p2m_dma_t(solo_dev, 0, dma, base + off,
dma               359 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 			ret = solo_p2m_dma_t(solo_dev, 0, dma + left, base,
dma                77 drivers/media/pci/sta2x11/sta2x11_vip.c 	dma_addr_t		dma;
dma               224 drivers/media/pci/sta2x11/sta2x11_vip.c 	reg_write(vip, DVP_VTP, (u32)vip_buf->dma);
dma               225 drivers/media/pci/sta2x11/sta2x11_vip.c 	reg_write(vip, DVP_VBP, (u32)vip_buf->dma + offset);
dma               276 drivers/media/pci/sta2x11/sta2x11_vip.c 	vip_buf->dma = vb2_dma_contig_plane_dma_addr(vb, 0);
dma              2516 drivers/media/pci/ttpci/av7110.c 		saa7146_write(dev, BASE_PAGE3, av7110->pt.dma | ME1 | 0x90);
dma              2539 drivers/media/pci/ttpci/av7110.c 		saa7146_write(dev, BASE_PAGE3, av7110->pt.dma | ME1 | 0x90);
dma               132 drivers/media/pci/ttpci/budget-core.c 	saa7146_write(dev, BASE_PAGE3, budget->pt.dma | ME1 | 0x90);
dma               154 drivers/media/pci/tw68/tw68-risc.c 	buf->cpu = pci_alloc_consistent(pci, buf->size, &buf->dma);
dma               169 drivers/media/pci/tw68/tw68-risc.c 	buf->cpu[1] = cpu_to_le32(buf->dma + 8);
dma               330 drivers/media/pci/tw68/tw68-video.c 	tw_writel(TW68_DMAP_SA, buf->dma);
dma               416 drivers/media/pci/tw68/tw68-video.c 	buf->jmp[1] = cpu_to_le32(buf->dma + 8);
dma               421 drivers/media/pci/tw68/tw68-video.c 		prev->jmp[1] = cpu_to_le32(buf->dma);
dma               444 drivers/media/pci/tw68/tw68-video.c 	struct sg_table *dma = vb2_dma_sg_plane_desc(vb, 0);
dma               455 drivers/media/pci/tw68/tw68-video.c 		tw68_risc_buffer(dev->pci, buf, dma->sgl,
dma               459 drivers/media/pci/tw68/tw68-video.c 		tw68_risc_buffer(dev->pci, buf, dma->sgl,
dma               463 drivers/media/pci/tw68/tw68-video.c 		tw68_risc_buffer(dev->pci, buf, dma->sgl,
dma               468 drivers/media/pci/tw68/tw68-video.c 		tw68_risc_buffer(dev->pci, buf, dma->sgl,
dma               474 drivers/media/pci/tw68/tw68-video.c 		tw68_risc_buffer(dev->pci, buf, dma->sgl,
dma               488 drivers/media/pci/tw68/tw68-video.c 	pci_free_consistent(dev->pci, buf->size, buf->cpu, buf->dma);
dma               118 drivers/media/pci/tw68/tw68.h 	dma_addr_t     dma;
dma                74 drivers/media/pci/tw686x/tw686x-audio.c 			reg_write(dev, reg, next->dma);
dma                76 drivers/media/pci/tw686x/tw686x-audio.c 		ac->ptr = done->dma - ac->buf[0].dma;
dma               192 drivers/media/pci/tw686x/tw686x-audio.c 		ac->buf[i].dma = rt->dma_addr + period_size * i;
dma               209 drivers/media/pci/tw686x/tw686x-audio.c 		reg_write(dev, ADMA_P_ADDR[ac->ch], p_buf->dma);
dma               210 drivers/media/pci/tw686x/tw686x-audio.c 		reg_write(dev, ADMA_B_ADDR[ac->ch], b_buf->dma);
dma                55 drivers/media/pci/tw686x/tw686x.h 	dma_addr_t dma;
dma               195 drivers/media/platform/aspeed-video.c 	dma_addr_t dma;
dma               672 drivers/media/platform/aspeed-video.c 	addr->virt = dma_alloc_coherent(video->dev, size, &addr->dma,
dma               684 drivers/media/platform/aspeed-video.c 	dma_free_coherent(video->dev, addr->size, addr->virt, addr->dma);
dma               686 drivers/media/platform/aspeed-video.c 	addr->dma = 0ULL;
dma               892 drivers/media/platform/aspeed-video.c 		aspeed_video_write(video, VE_SRC0_ADDR, video->srcs[0].dma);
dma               893 drivers/media/platform/aspeed-video.c 		aspeed_video_write(video, VE_SRC1_ADDR, video->srcs[1].dma);
dma               930 drivers/media/platform/aspeed-video.c 	aspeed_video_write(video, VE_JPEG_ADDR, video->jpeg.dma);
dma              1713 drivers/media/platform/aspeed-video.c 			  video->jpeg.dma);
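
The aspeed-video lines wrap dma_alloc_coherent()/dma_free_coherent() in a small {virt, dma, size} descriptor whose bus address is then written into the source and JPEG address registers. A generic sketch of that bookkeeping, assuming a hypothetical my_coherent_buf type:

#include <linux/dma-mapping.h>

struct my_coherent_buf {
	void		*virt;
	dma_addr_t	 dma;
	size_t		 size;
};

static int my_buf_alloc(struct device *dev, struct my_coherent_buf *b,
			size_t size)
{
	b->virt = dma_alloc_coherent(dev, size, &b->dma, GFP_KERNEL);
	if (!b->virt)
		return -ENOMEM;
	b->size = size;
	return 0;
}

static void my_buf_free(struct device *dev, struct my_coherent_buf *b)
{
	dma_free_coherent(dev, b->size, b->virt, b->dma);
	b->virt = NULL;
	b->dma = 0;
	b->size = 0;
}
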
dma                81 drivers/media/platform/exynos4-is/fimc-isp-video.c 	struct param_dma_output *dma = __get_isp_dma2(is);
dma                90 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->cmd = DMA_OUTPUT_COMMAND_ENABLE;
dma                91 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_ENABLE;
dma                92 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->buffer_address = is->is_dma_p_region +
dma                94 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->buffer_number = video->reqbufs_count;
dma                95 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->dma_out_mask = video->buf_mask;
dma               100 drivers/media/platform/exynos4-is/fimc-isp-video.c 		dma->buffer_address);
dma               123 drivers/media/platform/exynos4-is/fimc-isp-video.c 	struct param_dma_output *dma = __get_isp_dma2(is);
dma               130 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->cmd = DMA_OUTPUT_COMMAND_DISABLE;
dma               131 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_DISABLE;
dma               132 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->buffer_number = 0;
dma               133 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->buffer_address = 0;
dma               134 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->dma_out_mask = 0;
dma               420 drivers/media/platform/exynos4-is/fimc-isp-video.c 	struct param_dma_output *dma = __get_isp_dma2(is);
dma               427 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->format = DMA_OUTPUT_FORMAT_BAYER;
dma               428 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->order = DMA_OUTPUT_ORDER_GB_BG;
dma               429 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->plane = ifmt->memplanes;
dma               430 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->bitwidth = ifmt->depth[0];
dma               431 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->width = pixm->width;
dma               432 drivers/media/platform/exynos4-is/fimc-isp-video.c 	dma->height = pixm->height;
dma               323 drivers/media/platform/omap3isp/ispccdc.c 	ccdc_lsc_program_table(ccdc, req->table.dma);
dma               361 drivers/media/platform/omap3isp/ispccdc.c 				  req->table.dma);
dma               436 drivers/media/platform/omap3isp/ispccdc.c 						     &req->table.dma,
dma               444 drivers/media/platform/omap3isp/ispccdc.c 				      req->table.addr, req->table.dma,
dma               567 drivers/media/platform/omap3isp/ispccdc.c 	isp_reg_writel(isp, ccdc->fpc.dma, OMAP3_ISP_IOMEM_CCDC,
dma               729 drivers/media/platform/omap3isp/ispccdc.c 							  &fpc_new.dma,
dma               738 drivers/media/platform/omap3isp/ispccdc.c 						  fpc_new.dma);
dma               750 drivers/media/platform/omap3isp/ispccdc.c 					  fpc_old.addr, fpc_old.dma);
dma              1615 drivers/media/platform/omap3isp/ispccdc.c 		ccdc_set_outaddr(ccdc, buffer->dma);
dma              1792 drivers/media/platform/omap3isp/ispccdc.c 	ccdc_set_outaddr(ccdc, buffer->dma);
dma              2735 drivers/media/platform/omap3isp/ispccdc.c 				  ccdc->fpc.dma);
dma                38 drivers/media/platform/omap3isp/ispccdc.h 	dma_addr_t dma;
dma                56 drivers/media/platform/omap3isp/ispccdc.h 		dma_addr_t dma;
dma               543 drivers/media/platform/omap3isp/ispccp2.c 		ccp2_set_inaddr(ccp2, buffer->dma);
dma               934 drivers/media/platform/omap3isp/ispccp2.c 	ccp2_set_inaddr(ccp2, buffer->dma);
dma               691 drivers/media/platform/omap3isp/ispcsi2.c 	csi2_set_outaddr(csi2, buffer->dma);
dma               803 drivers/media/platform/omap3isp/ispcsi2.c 	csi2_set_outaddr(csi2, buffer->dma);
dma              1483 drivers/media/platform/omap3isp/isppreview.c 			preview_set_outaddr(prev, buffer->dma);
dma              1492 drivers/media/platform/omap3isp/isppreview.c 			preview_set_inaddr(prev, buffer->dma);
dma              1561 drivers/media/platform/omap3isp/isppreview.c 		preview_set_inaddr(prev, buffer->dma);
dma              1564 drivers/media/platform/omap3isp/isppreview.c 		preview_set_outaddr(prev, buffer->dma);
dma              1026 drivers/media/platform/omap3isp/ispresizer.c 		resizer_set_outaddr(res, buffer->dma);
dma              1035 drivers/media/platform/omap3isp/ispresizer.c 			resizer_set_inaddr(res, buffer->dma);
dma              1092 drivers/media/platform/omap3isp/ispresizer.c 		resizer_set_inaddr(res, buffer->dma);
dma              1107 drivers/media/platform/omap3isp/ispresizer.c 		resizer_set_outaddr(res, buffer->dma);
dma               373 drivers/media/platform/omap3isp/ispvideo.c 	buffer->dma = addr;
dma               124 drivers/media/platform/omap3isp/ispvideo.h 	dma_addr_t dma;
dma              1143 drivers/media/platform/s5p-mfc/s5p_mfc.c 	mfc_dev->dma_base[BANK_L_CTX] = mfc_dev->fw_buf.dma;
dma                42 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	mfc_write(dev, dev->ctx_buf.dma, S5P_FIMV_CONTEXT_MEM_ADDR_V6);
dma               128 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	mfc_write(dev, ctx->ctx.dma, S5P_FIMV_CONTEXT_MEM_ADDR_V6);
dma               248 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	dma_addr_t	dma;
dma              2356 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		dma_addr_t dma = vb2_dma_contig_plane_dma_addr(vb, i);
dma              2357 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (!dma) {
dma              2362 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			  vb->index, i, &dma);
dma                55 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		b->dma = dev->mem_base + offset;
dma                61 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);
dma                64 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		if (b->dma < base) {
dma                66 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 				&b->dma, &base);
dma                67 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 			dma_free_coherent(mem_dev, b->size, b->virt, b->dma);
dma                72 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);
dma                87 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);
dma                91 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);
dma               102 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		unsigned int start = (b->dma - dev->mem_base) >> PAGE_SHIFT;
dma               109 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		dma_free_coherent(mem_dev, b->size, b->virt, b->dma);
dma               112 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	b->dma = 0;
dma               120 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	dma_free_coherent(mem_dev, b->size, b->virt, b->dma);
dma               122 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	b->dma = 0;
dma                47 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
dma               177 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		BUG_ON(ctx->bank1.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
dma               187 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		BUG_ON(ctx->bank2.dma & ((1 << MFC_BANK2_ALIGN_ORDER) - 1));
dma               217 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->ctx.ofs = OFFSETA(ctx->ctx.dma);
dma               233 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->shm.ofs = ctx->shm.dma - dev->dma_base[BANK_L_CTX];
dma               350 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, OFFSETA(ctx->dsc.dma), S5P_FIMV_SI_CH0_DESC_ADR);
dma               385 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr1 = ctx->bank1.dma;
dma               387 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr2 = ctx->bank2.dma;
dma               548 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr1 = ctx->bank1.dma;
dma               550 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr2 = ctx->bank2.dma;
dma               321 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		BUG_ON(ctx->bank1.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
dma               522 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 = ctx->bank1.dma;
dma               666 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 = ctx->bank1.dma;
dma               113 drivers/media/platform/vsp1/vsp1_dl.c 	dma_addr_t dma;
dma               132 drivers/media/platform/vsp1/vsp1_dl.c 	dma_addr_t dma;
dma               156 drivers/media/platform/vsp1/vsp1_dl.c 	dma_addr_t dma;
dma               189 drivers/media/platform/vsp1/vsp1_dl.c 	dma_addr_t dma;
dma               276 drivers/media/platform/vsp1/vsp1_dl.c 	pool->mem = dma_alloc_wc(vsp1->bus_master, pool->size, &pool->dma,
dma               293 drivers/media/platform/vsp1/vsp1_dl.c 		dlb->dma = pool->dma + i * dlb_size;
dma               315 drivers/media/platform/vsp1/vsp1_dl.c 			    pool->dma);
dma               448 drivers/media/platform/vsp1/vsp1_dl.c 	pool->mem = dma_alloc_wc(vsp1->bus_master, pool->size, &pool->dma,
dma               472 drivers/media/platform/vsp1/vsp1_dl.c 		cmd->cmd_dma = pool->dma + cmd_offset;
dma               475 drivers/media/platform/vsp1/vsp1_dl.c 		cmd->data_dma = pool->dma + data_offset;
dma               524 drivers/media/platform/vsp1/vsp1_dl.c 			    pool->dma);
dma               568 drivers/media/platform/vsp1/vsp1_dl.c 	dl->dma = dl->body0->dma + header_offset;
dma               571 drivers/media/platform/vsp1/vsp1_dl.c 	dl->header->lists[0].addr = dl->body0->dma;
dma               769 drivers/media/platform/vsp1/vsp1_dl.c 		hdr->addr = dlb->dma;
dma               803 drivers/media/platform/vsp1/vsp1_dl.c 		dl->header->next_header = next->dma;
dma               810 drivers/media/platform/vsp1/vsp1_dl.c 		dl->header->next_header = dl->dma;
dma               861 drivers/media/platform/vsp1/vsp1_dl.c 	vsp1_write(vsp1, VI6_DL_HDR_ADDR(dlm->index), dl->dma);
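
The vsp1_dl lines allocate one write-combined pool with dma_alloc_wc() and carve it into fixed-size bodies whose device addresses are pool->dma plus the body's byte offset, which is what lets a display-list header point at the next list purely by bus address. A minimal sketch of carving such a pool; the body count and size are illustrative:

#include <linux/dma-mapping.h>

struct my_body {
	void		*virt;
	dma_addr_t	 dma;
};

struct my_pool {
	void		*mem;
	dma_addr_t	 dma;
	size_t		 size;
	struct my_body	 body[8];
};

static int my_pool_create(struct device *dev, struct my_pool *pool,
			  size_t body_size)
{
	unsigned int i;

	pool->size = body_size * ARRAY_SIZE(pool->body);
	pool->mem = dma_alloc_wc(dev, pool->size, &pool->dma, GFP_KERNEL);
	if (!pool->mem)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(pool->body); i++) {
		/* CPU and device views are kept at the same byte offset */
		pool->body[i].virt = pool->mem + i * body_size;
		pool->body[i].dma  = pool->dma + i * body_size;
	}
	return 0;
}

static void my_pool_destroy(struct device *dev, struct my_pool *pool)
{
	dma_free_wc(dev, pool->size, pool->mem, pool->dma);
}
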
dma                58 drivers/media/platform/xilinx/xilinx-dma.c static int xvip_dma_verify_format(struct xvip_dma *dma)
dma                64 drivers/media/platform/xilinx/xilinx-dma.c 	subdev = xvip_dma_remote_subdev(&dma->pad, &fmt.pad);
dma                73 drivers/media/platform/xilinx/xilinx-dma.c 	if (dma->fmtinfo->code != fmt.format.code ||
dma                74 drivers/media/platform/xilinx/xilinx-dma.c 	    dma->format.height != fmt.format.height ||
dma                75 drivers/media/platform/xilinx/xilinx-dma.c 	    dma->format.width != fmt.format.width ||
dma                76 drivers/media/platform/xilinx/xilinx-dma.c 	    dma->format.colorspace != fmt.format.colorspace)
dma                99 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = pipe->output;
dma               105 drivers/media/platform/xilinx/xilinx-dma.c 	entity = &dma->video.entity;
dma               196 drivers/media/platform/xilinx/xilinx-dma.c 		struct xvip_dma *dma;
dma               201 drivers/media/platform/xilinx/xilinx-dma.c 		dma = to_xvip_dma(media_entity_to_video_device(entity));
dma               203 drivers/media/platform/xilinx/xilinx-dma.c 		if (dma->pad.flags & MEDIA_PAD_FL_SINK) {
dma               204 drivers/media/platform/xilinx/xilinx-dma.c 			pipe->output = dma;
dma               258 drivers/media/platform/xilinx/xilinx-dma.c 				 struct xvip_dma *dma)
dma               266 drivers/media/platform/xilinx/xilinx-dma.c 		ret = xvip_pipeline_validate(pipe, dma);
dma               294 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma;
dma               302 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = buf->dma;
dma               304 drivers/media/platform/xilinx/xilinx-dma.c 	spin_lock(&dma->queued_lock);
dma               306 drivers/media/platform/xilinx/xilinx-dma.c 	spin_unlock(&dma->queued_lock);
dma               309 drivers/media/platform/xilinx/xilinx-dma.c 	buf->buf.sequence = dma->sequence++;
dma               311 drivers/media/platform/xilinx/xilinx-dma.c 	vb2_set_plane_payload(&buf->buf.vb2_buf, 0, dma->format.sizeimage);
dma               320 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = vb2_get_drv_priv(vq);
dma               324 drivers/media/platform/xilinx/xilinx-dma.c 		return sizes[0] < dma->format.sizeimage ? -EINVAL : 0;
dma               327 drivers/media/platform/xilinx/xilinx-dma.c 	sizes[0] = dma->format.sizeimage;
dma               335 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = vb2_get_drv_priv(vb->vb2_queue);
dma               338 drivers/media/platform/xilinx/xilinx-dma.c 	buf->dma = dma;
dma               346 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = vb2_get_drv_priv(vb->vb2_queue);
dma               352 drivers/media/platform/xilinx/xilinx-dma.c 	if (dma->queue.type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
dma               354 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.dir = DMA_DEV_TO_MEM;
dma               355 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.src_sgl = false;
dma               356 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.dst_sgl = true;
dma               357 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.dst_start = addr;
dma               360 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.dir = DMA_MEM_TO_DEV;
dma               361 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.src_sgl = true;
dma               362 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.dst_sgl = false;
dma               363 drivers/media/platform/xilinx/xilinx-dma.c 		dma->xt.src_start = addr;
dma               366 drivers/media/platform/xilinx/xilinx-dma.c 	dma->xt.frame_size = 1;
dma               367 drivers/media/platform/xilinx/xilinx-dma.c 	dma->sgl[0].size = dma->format.width * dma->fmtinfo->bpp;
dma               368 drivers/media/platform/xilinx/xilinx-dma.c 	dma->sgl[0].icg = dma->format.bytesperline - dma->sgl[0].size;
dma               369 drivers/media/platform/xilinx/xilinx-dma.c 	dma->xt.numf = dma->format.height;
dma               371 drivers/media/platform/xilinx/xilinx-dma.c 	desc = dmaengine_prep_interleaved_dma(dma->dma, &dma->xt, flags);
dma               373 drivers/media/platform/xilinx/xilinx-dma.c 		dev_err(dma->xdev->dev, "Failed to prepare DMA transfer\n");
dma               380 drivers/media/platform/xilinx/xilinx-dma.c 	spin_lock_irq(&dma->queued_lock);
dma               381 drivers/media/platform/xilinx/xilinx-dma.c 	list_add_tail(&buf->queue, &dma->queued_bufs);
dma               382 drivers/media/platform/xilinx/xilinx-dma.c 	spin_unlock_irq(&dma->queued_lock);
dma               386 drivers/media/platform/xilinx/xilinx-dma.c 	if (vb2_is_streaming(&dma->queue))
dma               387 drivers/media/platform/xilinx/xilinx-dma.c 		dma_async_issue_pending(dma->dma);
dma               392 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = vb2_get_drv_priv(vq);
dma               397 drivers/media/platform/xilinx/xilinx-dma.c 	dma->sequence = 0;
dma               406 drivers/media/platform/xilinx/xilinx-dma.c 	pipe = dma->video.entity.pipe
dma               407 drivers/media/platform/xilinx/xilinx-dma.c 	     ? to_xvip_pipeline(&dma->video.entity) : &dma->pipe;
dma               409 drivers/media/platform/xilinx/xilinx-dma.c 	ret = media_pipeline_start(&dma->video.entity, &pipe->pipe);
dma               416 drivers/media/platform/xilinx/xilinx-dma.c 	ret = xvip_dma_verify_format(dma);
dma               420 drivers/media/platform/xilinx/xilinx-dma.c 	ret = xvip_pipeline_prepare(pipe, dma);
dma               427 drivers/media/platform/xilinx/xilinx-dma.c 	dma_async_issue_pending(dma->dma);
dma               435 drivers/media/platform/xilinx/xilinx-dma.c 	media_pipeline_stop(&dma->video.entity);
dma               439 drivers/media/platform/xilinx/xilinx-dma.c 	spin_lock_irq(&dma->queued_lock);
dma               440 drivers/media/platform/xilinx/xilinx-dma.c 	list_for_each_entry_safe(buf, nbuf, &dma->queued_bufs, queue) {
dma               444 drivers/media/platform/xilinx/xilinx-dma.c 	spin_unlock_irq(&dma->queued_lock);
dma               451 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = vb2_get_drv_priv(vq);
dma               452 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_pipeline *pipe = to_xvip_pipeline(&dma->video.entity);
dma               459 drivers/media/platform/xilinx/xilinx-dma.c 	dmaengine_terminate_all(dma->dma);
dma               463 drivers/media/platform/xilinx/xilinx-dma.c 	media_pipeline_stop(&dma->video.entity);
dma               466 drivers/media/platform/xilinx/xilinx-dma.c 	spin_lock_irq(&dma->queued_lock);
dma               467 drivers/media/platform/xilinx/xilinx-dma.c 	list_for_each_entry_safe(buf, nbuf, &dma->queued_bufs, queue) {
dma               471 drivers/media/platform/xilinx/xilinx-dma.c 	spin_unlock_irq(&dma->queued_lock);
dma               492 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = to_xvip_dma(vfh->vdev);
dma               494 drivers/media/platform/xilinx/xilinx-dma.c 	cap->capabilities = dma->xdev->v4l2_caps | V4L2_CAP_STREAMING |
dma               498 drivers/media/platform/xilinx/xilinx-dma.c 	strscpy(cap->card, dma->video.name, sizeof(cap->card));
dma               500 drivers/media/platform/xilinx/xilinx-dma.c 		 dma->xdev->dev->of_node, dma->port);
dma               514 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = to_xvip_dma(vfh->vdev);
dma               519 drivers/media/platform/xilinx/xilinx-dma.c 	f->pixelformat = dma->format.pixelformat;
dma               528 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = to_xvip_dma(vfh->vdev);
dma               530 drivers/media/platform/xilinx/xilinx-dma.c 	format->fmt.pix = dma->format;
dma               536 drivers/media/platform/xilinx/xilinx-dma.c __xvip_dma_try_format(struct xvip_dma *dma, struct v4l2_pix_format *pix,
dma               562 drivers/media/platform/xilinx/xilinx-dma.c 	align = lcm(dma->align, info->bpp);
dma               576 drivers/media/platform/xilinx/xilinx-dma.c 	max_bpl = rounddown(XVIP_DMA_MAX_WIDTH, dma->align);
dma               577 drivers/media/platform/xilinx/xilinx-dma.c 	bpl = rounddown(pix->bytesperline, dma->align);
dma               590 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = to_xvip_dma(vfh->vdev);
dma               592 drivers/media/platform/xilinx/xilinx-dma.c 	__xvip_dma_try_format(dma, &format->fmt.pix, NULL);
dma               600 drivers/media/platform/xilinx/xilinx-dma.c 	struct xvip_dma *dma = to_xvip_dma(vfh->vdev);
dma               603 drivers/media/platform/xilinx/xilinx-dma.c 	__xvip_dma_try_format(dma, &format->fmt.pix, &info);
dma               605 drivers/media/platform/xilinx/xilinx-dma.c 	if (vb2_is_busy(&dma->queue))
dma               608 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format = format->fmt.pix;
dma               609 drivers/media/platform/xilinx/xilinx-dma.c 	dma->fmtinfo = info;
dma               650 drivers/media/platform/xilinx/xilinx-dma.c int xvip_dma_init(struct xvip_composite_device *xdev, struct xvip_dma *dma,
dma               656 drivers/media/platform/xilinx/xilinx-dma.c 	dma->xdev = xdev;
dma               657 drivers/media/platform/xilinx/xilinx-dma.c 	dma->port = port;
dma               658 drivers/media/platform/xilinx/xilinx-dma.c 	mutex_init(&dma->lock);
dma               659 drivers/media/platform/xilinx/xilinx-dma.c 	mutex_init(&dma->pipe.lock);
dma               660 drivers/media/platform/xilinx/xilinx-dma.c 	INIT_LIST_HEAD(&dma->queued_bufs);
dma               661 drivers/media/platform/xilinx/xilinx-dma.c 	spin_lock_init(&dma->queued_lock);
dma               663 drivers/media/platform/xilinx/xilinx-dma.c 	dma->fmtinfo = xvip_get_format_by_fourcc(XVIP_DMA_DEF_FORMAT);
dma               664 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.pixelformat = dma->fmtinfo->fourcc;
dma               665 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.colorspace = V4L2_COLORSPACE_SRGB;
dma               666 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.field = V4L2_FIELD_NONE;
dma               667 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.width = XVIP_DMA_DEF_WIDTH;
dma               668 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.height = XVIP_DMA_DEF_HEIGHT;
dma               669 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.bytesperline = dma->format.width * dma->fmtinfo->bpp;
dma               670 drivers/media/platform/xilinx/xilinx-dma.c 	dma->format.sizeimage = dma->format.bytesperline * dma->format.height;
dma               673 drivers/media/platform/xilinx/xilinx-dma.c 	dma->pad.flags = type == V4L2_BUF_TYPE_VIDEO_CAPTURE
dma               676 drivers/media/platform/xilinx/xilinx-dma.c 	ret = media_entity_pads_init(&dma->video.entity, 1, &dma->pad);
dma               681 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.fops = &xvip_dma_fops;
dma               682 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.v4l2_dev = &xdev->v4l2_dev;
dma               683 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.queue = &dma->queue;
dma               684 drivers/media/platform/xilinx/xilinx-dma.c 	snprintf(dma->video.name, sizeof(dma->video.name), "%pOFn %s %u",
dma               688 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.vfl_type = VFL_TYPE_GRABBER;
dma               689 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.vfl_dir = type == V4L2_BUF_TYPE_VIDEO_CAPTURE
dma               691 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.release = video_device_release_empty;
dma               692 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.ioctl_ops = &xvip_dma_ioctl_ops;
dma               693 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.lock = &dma->lock;
dma               694 drivers/media/platform/xilinx/xilinx-dma.c 	dma->video.device_caps = V4L2_CAP_STREAMING;
dma               696 drivers/media/platform/xilinx/xilinx-dma.c 		dma->video.device_caps |= V4L2_CAP_VIDEO_CAPTURE;
dma               698 drivers/media/platform/xilinx/xilinx-dma.c 		dma->video.device_caps |= V4L2_CAP_VIDEO_OUTPUT;
dma               700 drivers/media/platform/xilinx/xilinx-dma.c 	video_set_drvdata(&dma->video, dma);
dma               710 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.type = type;
dma               711 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
dma               712 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.lock = &dma->lock;
dma               713 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.drv_priv = dma;
dma               714 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.buf_struct_size = sizeof(struct xvip_dma_buffer);
dma               715 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.ops = &xvip_dma_queue_qops;
dma               716 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.mem_ops = &vb2_dma_contig_memops;
dma               717 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC
dma               719 drivers/media/platform/xilinx/xilinx-dma.c 	dma->queue.dev = dma->xdev->dev;
dma               720 drivers/media/platform/xilinx/xilinx-dma.c 	ret = vb2_queue_init(&dma->queue);
dma               722 drivers/media/platform/xilinx/xilinx-dma.c 		dev_err(dma->xdev->dev, "failed to initialize VB2 queue\n");
dma               728 drivers/media/platform/xilinx/xilinx-dma.c 	dma->dma = dma_request_slave_channel(dma->xdev->dev, name);
dma               729 drivers/media/platform/xilinx/xilinx-dma.c 	if (dma->dma == NULL) {
dma               730 drivers/media/platform/xilinx/xilinx-dma.c 		dev_err(dma->xdev->dev, "no VDMA channel found\n");
dma               735 drivers/media/platform/xilinx/xilinx-dma.c 	dma->align = 1 << dma->dma->device->copy_align;
dma               737 drivers/media/platform/xilinx/xilinx-dma.c 	ret = video_register_device(&dma->video, VFL_TYPE_GRABBER, -1);
dma               739 drivers/media/platform/xilinx/xilinx-dma.c 		dev_err(dma->xdev->dev, "failed to register video device\n");
dma               746 drivers/media/platform/xilinx/xilinx-dma.c 	xvip_dma_cleanup(dma);
dma               750 drivers/media/platform/xilinx/xilinx-dma.c void xvip_dma_cleanup(struct xvip_dma *dma)
dma               752 drivers/media/platform/xilinx/xilinx-dma.c 	if (video_is_registered(&dma->video))
dma               753 drivers/media/platform/xilinx/xilinx-dma.c 		video_unregister_device(&dma->video);
dma               755 drivers/media/platform/xilinx/xilinx-dma.c 	if (dma->dma)
dma               756 drivers/media/platform/xilinx/xilinx-dma.c 		dma_release_channel(dma->dma);
dma               758 drivers/media/platform/xilinx/xilinx-dma.c 	media_entity_cleanup(&dma->video.entity);
dma               760 drivers/media/platform/xilinx/xilinx-dma.c 	mutex_destroy(&dma->lock);
dma               761 drivers/media/platform/xilinx/xilinx-dma.c 	mutex_destroy(&dma->pipe.lock);
dma                92 drivers/media/platform/xilinx/xilinx-dma.h 	struct dma_chan *dma;
dma               100 drivers/media/platform/xilinx/xilinx-dma.h int xvip_dma_init(struct xvip_composite_device *xdev, struct xvip_dma *dma,
dma               102 drivers/media/platform/xilinx/xilinx-dma.h void xvip_dma_cleanup(struct xvip_dma *dma);
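
The xilinx-dma lines describe one video frame to the dmaengine core as an interleaved template: numf lines, each carrying sgl[0].size payload bytes followed by an icg gap that pads out to bytesperline, with the vb2 buffer's bus address in dst_start for capture. A condensed sketch of that submission path; as in the structure above, the template lives in a longer-lived context so the trailing data_chunk provides backing storage for the flexible sgl[] member:

#include <linux/dmaengine.h>

struct my_dma_ctx {
	struct dma_chan *chan;
	struct dma_interleaved_template xt;
	struct data_chunk sgl[1];	/* storage aliased by xt.sgl[0] */
};

static int my_queue_frame(struct my_dma_ctx *ctx, dma_addr_t addr,
			  unsigned int width_bytes, unsigned int stride,
			  unsigned int height)
{
	struct dma_async_tx_descriptor *desc;

	ctx->xt.dir = DMA_DEV_TO_MEM;		/* capture: device -> memory */
	ctx->xt.src_sgl = false;
	ctx->xt.dst_sgl = true;
	ctx->xt.dst_start = addr;
	ctx->xt.frame_size = 1;			/* one chunk per line */
	ctx->xt.numf = height;			/* number of lines */
	ctx->sgl[0].size = width_bytes;		/* payload bytes per line */
	ctx->sgl[0].icg = stride - width_bytes;	/* gap up to bytesperline */

	desc = dmaengine_prep_interleaved_dma(ctx->chan, &ctx->xt,
					      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EIO;

	dmaengine_submit(desc);
	dma_async_issue_pending(ctx->chan);
	return 0;
}
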
dma               175 drivers/media/platform/xilinx/xilinx-vipp.c 	struct xvip_dma *dma;
dma               177 drivers/media/platform/xilinx/xilinx-vipp.c 	list_for_each_entry(dma, &xdev->dmas, list) {
dma               178 drivers/media/platform/xilinx/xilinx-vipp.c 		if (dma->port == port)
dma               179 drivers/media/platform/xilinx/xilinx-vipp.c 			return dma;
dma               196 drivers/media/platform/xilinx/xilinx-vipp.c 	struct xvip_dma *dma;
dma               217 drivers/media/platform/xilinx/xilinx-vipp.c 		dma = xvip_graph_find_dma(xdev, link.local_port);
dma               218 drivers/media/platform/xilinx/xilinx-vipp.c 		if (dma == NULL) {
dma               227 drivers/media/platform/xilinx/xilinx-vipp.c 			dma->video.name);
dma               248 drivers/media/platform/xilinx/xilinx-vipp.c 		if (dma->pad.flags & MEDIA_PAD_FL_SOURCE) {
dma               249 drivers/media/platform/xilinx/xilinx-vipp.c 			source = &dma->video.entity;
dma               250 drivers/media/platform/xilinx/xilinx-vipp.c 			source_pad = &dma->pad;
dma               256 drivers/media/platform/xilinx/xilinx-vipp.c 			sink = &dma->video.entity;
dma               257 drivers/media/platform/xilinx/xilinx-vipp.c 			sink_pad = &dma->pad;
dma               434 drivers/media/platform/xilinx/xilinx-vipp.c 	struct xvip_dma *dma;
dma               453 drivers/media/platform/xilinx/xilinx-vipp.c 	dma = devm_kzalloc(xdev->dev, sizeof(*dma), GFP_KERNEL);
dma               454 drivers/media/platform/xilinx/xilinx-vipp.c 	if (dma == NULL)
dma               457 drivers/media/platform/xilinx/xilinx-vipp.c 	ret = xvip_dma_init(xdev, dma, type, index);
dma               463 drivers/media/platform/xilinx/xilinx-vipp.c 	list_add_tail(&dma->list, &xdev->dmas);
dma               497 drivers/media/platform/xilinx/xilinx-vipp.c 	struct xvip_dma *dma;
dma               502 drivers/media/platform/xilinx/xilinx-vipp.c 	list_for_each_entry_safe(dma, dmap, &xdev->dmas, list) {
dma               503 drivers/media/platform/xilinx/xilinx-vipp.c 		xvip_dma_cleanup(dma);
dma               504 drivers/media/platform/xilinx/xilinx-vipp.c 		list_del(&dma->list);
dma              1570 drivers/media/usb/uvc/uvc_video.c 				  uvc_urb->buffer, uvc_urb->dma);
dma              1617 drivers/media/usb/uvc/uvc_video.c 				gfp_flags | __GFP_NOWARN, &uvc_urb->dma);
dma              1733 drivers/media/usb/uvc/uvc_video.c 		urb->transfer_dma = uvc_urb->dma;
dma              1798 drivers/media/usb/uvc/uvc_video.c 		urb->transfer_dma = uvc_urb->dma;
dma               528 drivers/media/usb/uvc/uvcvideo.h 	dma_addr_t dma;
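
The uvc_video lines allocate each URB buffer with a DMA-aware allocator, keep the returned bus address in uvc_urb->dma, and later copy it into urb->transfer_dma so the USB core reuses the existing mapping. A small sketch of that pattern with usb_alloc_coherent(); setting URB_NO_TRANSFER_DMA_MAP is what tells the HCD to trust transfer_dma instead of mapping transfer_buffer itself:

#include <linux/usb.h>

static int my_alloc_urb_buffer(struct usb_device *udev, struct urb *urb,
			       size_t size, gfp_t gfp)
{
	dma_addr_t dma;
	void *buf;

	buf = usb_alloc_coherent(udev, size, gfp, &dma);
	if (!buf)
		return -ENOMEM;

	urb->transfer_buffer = buf;
	urb->transfer_buffer_length = size;
	urb->transfer_dma = dma;			/* pre-mapped buffer */
	urb->transfer_flags |= URB_NO_TRANSFER_DMA_MAP;
	return 0;
}

static void my_free_urb_buffer(struct usb_device *udev, struct urb *urb,
			       size_t size)
{
	usb_free_coherent(udev, size, urb->transfer_buffer, urb->transfer_dma);
}
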
dma               141 drivers/media/v4l2-core/videobuf-dma-sg.c 	return &mem->dma;
dma               145 drivers/media/v4l2-core/videobuf-dma-sg.c static void videobuf_dma_init(struct videobuf_dmabuf *dma)
dma               147 drivers/media/v4l2-core/videobuf-dma-sg.c 	memset(dma, 0, sizeof(*dma));
dma               148 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->magic = MAGIC_DMABUF;
dma               151 drivers/media/v4l2-core/videobuf-dma-sg.c static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma,
dma               158 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->direction = direction;
dma               159 drivers/media/v4l2-core/videobuf-dma-sg.c 	switch (dma->direction) {
dma               172 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->offset = data & ~PAGE_MASK;
dma               173 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->size = size;
dma               174 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->nr_pages = last-first+1;
dma               175 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->pages = kmalloc_array(dma->nr_pages, sizeof(struct page *),
dma               177 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (NULL == dma->pages)
dma               184 drivers/media/v4l2-core/videobuf-dma-sg.c 		data, size, dma->nr_pages);
dma               186 drivers/media/v4l2-core/videobuf-dma-sg.c 	err = get_user_pages(data & PAGE_MASK, dma->nr_pages,
dma               187 drivers/media/v4l2-core/videobuf-dma-sg.c 			     flags | FOLL_LONGTERM, dma->pages, NULL);
dma               189 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (err != dma->nr_pages) {
dma               190 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->nr_pages = (err >= 0) ? err : 0;
dma               192 drivers/media/v4l2-core/videobuf-dma-sg.c 			dma->nr_pages);
dma               198 drivers/media/v4l2-core/videobuf-dma-sg.c static int videobuf_dma_init_user(struct videobuf_dmabuf *dma, int direction,
dma               204 drivers/media/v4l2-core/videobuf-dma-sg.c 	ret = videobuf_dma_init_user_locked(dma, direction, data, size);
dma               210 drivers/media/v4l2-core/videobuf-dma-sg.c static int videobuf_dma_init_kernel(struct videobuf_dmabuf *dma, int direction,
dma               217 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->direction = direction;
dma               218 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages),
dma               220 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (!dma->vaddr_pages)
dma               223 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL);
dma               224 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (!dma->dma_addr) {
dma               225 drivers/media/v4l2-core/videobuf-dma-sg.c 		kfree(dma->vaddr_pages);
dma               231 drivers/media/v4l2-core/videobuf-dma-sg.c 		addr = dma_alloc_coherent(dma->dev, PAGE_SIZE,
dma               232 drivers/media/v4l2-core/videobuf-dma-sg.c 					  &(dma->dma_addr[i]), GFP_KERNEL);
dma               236 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->vaddr_pages[i] = virt_to_page(addr);
dma               238 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP,
dma               240 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (NULL == dma->vaddr) {
dma               246 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->vaddr, nr_pages << PAGE_SHIFT);
dma               248 drivers/media/v4l2-core/videobuf-dma-sg.c 	memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT);
dma               249 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->nr_pages = nr_pages;
dma               257 drivers/media/v4l2-core/videobuf-dma-sg.c 		addr = page_address(dma->vaddr_pages[i]);
dma               258 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma_free_coherent(dma->dev, PAGE_SIZE, addr, dma->dma_addr[i]);
dma               260 drivers/media/v4l2-core/videobuf-dma-sg.c 	kfree(dma->dma_addr);
dma               261 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->dma_addr = NULL;
dma               262 drivers/media/v4l2-core/videobuf-dma-sg.c 	kfree(dma->vaddr_pages);
dma               263 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->vaddr_pages = NULL;
dma               269 drivers/media/v4l2-core/videobuf-dma-sg.c static int videobuf_dma_init_overlay(struct videobuf_dmabuf *dma, int direction,
dma               274 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->direction = direction;
dma               279 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->bus_addr = addr;
dma               280 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->nr_pages = nr_pages;
dma               285 drivers/media/v4l2-core/videobuf-dma-sg.c static int videobuf_dma_map(struct device *dev, struct videobuf_dmabuf *dma)
dma               287 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
dma               288 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(0 == dma->nr_pages);
dma               290 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (dma->pages) {
dma               291 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages,
dma               292 drivers/media/v4l2-core/videobuf-dma-sg.c 						   dma->offset, dma->size);
dma               294 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (dma->vaddr) {
dma               295 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr,
dma               296 drivers/media/v4l2-core/videobuf-dma-sg.c 						     dma->nr_pages);
dma               298 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (dma->bus_addr) {
dma               299 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->sglist = vmalloc(sizeof(*dma->sglist));
dma               300 drivers/media/v4l2-core/videobuf-dma-sg.c 		if (NULL != dma->sglist) {
dma               301 drivers/media/v4l2-core/videobuf-dma-sg.c 			dma->sglen = 1;
dma               302 drivers/media/v4l2-core/videobuf-dma-sg.c 			sg_dma_address(&dma->sglist[0])	= dma->bus_addr
dma               304 drivers/media/v4l2-core/videobuf-dma-sg.c 			dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK;
dma               305 drivers/media/v4l2-core/videobuf-dma-sg.c 			sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE;
dma               308 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (NULL == dma->sglist) {
dma               312 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (!dma->bus_addr) {
dma               313 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->sglen = dma_map_sg(dev, dma->sglist,
dma               314 drivers/media/v4l2-core/videobuf-dma-sg.c 					dma->nr_pages, dma->direction);
dma               315 drivers/media/v4l2-core/videobuf-dma-sg.c 		if (0 == dma->sglen) {
dma               318 drivers/media/v4l2-core/videobuf-dma-sg.c 			vfree(dma->sglist);
dma               319 drivers/media/v4l2-core/videobuf-dma-sg.c 			dma->sglist = NULL;
dma               320 drivers/media/v4l2-core/videobuf-dma-sg.c 			dma->sglen = 0;
dma               328 drivers/media/v4l2-core/videobuf-dma-sg.c int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma)
dma               330 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
dma               332 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (!dma->sglen)
dma               335 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma_unmap_sg(dev, dma->sglist, dma->nr_pages, dma->direction);
dma               337 drivers/media/v4l2-core/videobuf-dma-sg.c 	vfree(dma->sglist);
dma               338 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->sglist = NULL;
dma               339 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->sglen = 0;
dma               345 drivers/media/v4l2-core/videobuf-dma-sg.c int videobuf_dma_free(struct videobuf_dmabuf *dma)
dma               348 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
dma               349 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(dma->sglen);
dma               351 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (dma->pages) {
dma               352 drivers/media/v4l2-core/videobuf-dma-sg.c 		for (i = 0; i < dma->nr_pages; i++) {
dma               353 drivers/media/v4l2-core/videobuf-dma-sg.c 			if (dma->direction == DMA_FROM_DEVICE)
dma               354 drivers/media/v4l2-core/videobuf-dma-sg.c 				set_page_dirty_lock(dma->pages[i]);
dma               355 drivers/media/v4l2-core/videobuf-dma-sg.c 			put_page(dma->pages[i]);
dma               357 drivers/media/v4l2-core/videobuf-dma-sg.c 		kfree(dma->pages);
dma               358 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->pages = NULL;
dma               361 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (dma->dma_addr) {
dma               362 drivers/media/v4l2-core/videobuf-dma-sg.c 		for (i = 0; i < dma->nr_pages; i++) {
dma               365 drivers/media/v4l2-core/videobuf-dma-sg.c 			addr = page_address(dma->vaddr_pages[i]);
dma               366 drivers/media/v4l2-core/videobuf-dma-sg.c 			dma_free_coherent(dma->dev, PAGE_SIZE, addr,
dma               367 drivers/media/v4l2-core/videobuf-dma-sg.c 					  dma->dma_addr[i]);
dma               369 drivers/media/v4l2-core/videobuf-dma-sg.c 		kfree(dma->dma_addr);
dma               370 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->dma_addr = NULL;
dma               371 drivers/media/v4l2-core/videobuf-dma-sg.c 		kfree(dma->vaddr_pages);
dma               372 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->vaddr_pages = NULL;
dma               373 drivers/media/v4l2-core/videobuf-dma-sg.c 		vunmap(dma->vaddr);
dma               374 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->vaddr = NULL;
dma               377 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (dma->bus_addr)
dma               378 drivers/media/v4l2-core/videobuf-dma-sg.c 		dma->bus_addr = 0;
dma               379 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma->direction = DMA_NONE;
dma               483 drivers/media/v4l2-core/videobuf-dma-sg.c 	videobuf_dma_init(&mem->dma);
dma               499 drivers/media/v4l2-core/videobuf-dma-sg.c 	return mem->dma.vaddr;
dma               513 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (!mem->dma.dev)
dma               514 drivers/media/v4l2-core/videobuf-dma-sg.c 		mem->dma.dev = q->dev;
dma               516 drivers/media/v4l2-core/videobuf-dma-sg.c 		WARN_ON(mem->dma.dev != q->dev);
dma               524 drivers/media/v4l2-core/videobuf-dma-sg.c 			err = videobuf_dma_init_kernel(&mem->dma,
dma               531 drivers/media/v4l2-core/videobuf-dma-sg.c 			err = videobuf_dma_init_user(&mem->dma,
dma               542 drivers/media/v4l2-core/videobuf-dma-sg.c 			err = videobuf_dma_init_user_locked(&mem->dma,
dma               560 drivers/media/v4l2-core/videobuf-dma-sg.c 		err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE,
dma               568 drivers/media/v4l2-core/videobuf-dma-sg.c 	err = videobuf_dma_map(q->dev, &mem->dma);
dma               579 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(!mem || !mem->dma.sglen);
dma               582 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF);
dma               584 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma_sync_sg_for_cpu(q->dev, mem->dma.sglist,
dma               585 drivers/media/v4l2-core/videobuf-dma-sg.c 			    mem->dma.nr_pages, mem->dma.direction);
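
The videobuf-dma-sg lines follow one lifecycle: pin user pages (or allocate kernel pages), build a scatterlist, dma_map_sg() it for the device, dma_sync_sg_for_cpu() before the CPU reads the data, and dma_unmap_sg() plus page release on teardown. A minimal sketch of the map/sync/unmap half on an already built sg_table, using the generic DMA API:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_map_for_device(struct device *dev, struct sg_table *sgt,
			     enum dma_data_direction dir)
{
	int nents;

	/* hand the scatterlist to the device; nents may shrink if an
	 * IOMMU merges contiguous entries */
	nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);
	if (!nents)
		return -EIO;
	sgt->nents = nents;
	return 0;
}

static void my_cpu_access(struct device *dev, struct sg_table *sgt,
			  enum dma_data_direction dir)
{
	/* make device writes visible before the CPU touches the buffer */
	dma_sync_sg_for_cpu(dev, sgt->sgl, sgt->orig_nents, dir);
}

static void my_unmap(struct device *dev, struct sg_table *sgt,
		     enum dma_data_direction dir)
{
	/* must be called with the same nents that was passed to map */
	dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir);
}
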
dma               538 drivers/message/fusion/mptbase.h 		dma_addr_t	 dma;
dma               860 drivers/message/fusion/mptfc.c 	cfg.physAddr = ioc->fc_data.fc_port_page1[portnum].dma;
dma               915 drivers/message/fusion/mptfc.c 		page1_dma = ioc->fc_data.fc_port_page1[portnum].dma;
dma               933 drivers/message/fusion/mptfc.c 		ioc->fc_data.fc_port_page1[portnum].dma = page1_dma;
dma              1522 drivers/message/fusion/mptfc.c 				ioc->fc_data.fc_port_page1[ii].dma);
dma                84 drivers/message/fusion/mptlan.c 	dma_addr_t	dma;
dma               522 drivers/message/fusion/mptlan.c 			pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[i].dma,
dma               534 drivers/message/fusion/mptlan.c 			pci_unmap_single(mpt_dev->pcidev, priv->SendCtl[i].dma,
dma               588 drivers/message/fusion/mptlan.c 	pci_unmap_single(mpt_dev->pcidev, priv->SendCtl[ctx].dma,
dma               654 drivers/message/fusion/mptlan.c 		pci_unmap_single(mpt_dev->pcidev, priv->SendCtl[ctx].dma,
dma               684 drivers/message/fusion/mptlan.c 	dma_addr_t dma;
dma               726 drivers/message/fusion/mptlan.c         dma = pci_map_single(mpt_dev->pcidev, skb->data, skb->len,
dma               730 drivers/message/fusion/mptlan.c 	priv->SendCtl[ctx].dma = dma;
dma               778 drivers/message/fusion/mptlan.c 	pSimple->Address.Low = cpu_to_le32((u32) dma);
dma               780 drivers/message/fusion/mptlan.c 		pSimple->Address.High = cpu_to_le32((u32) ((u64) dma >> 32));
dma               874 drivers/message/fusion/mptlan.c 		pci_dma_sync_single_for_cpu(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
dma               879 drivers/message/fusion/mptlan.c 		pci_dma_sync_single_for_device(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
dma               888 drivers/message/fusion/mptlan.c 	pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
dma               933 drivers/message/fusion/mptlan.c 		pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
dma              1035 drivers/message/fusion/mptlan.c 						    priv->RcvCtl[ctx].dma,
dma              1041 drivers/message/fusion/mptlan.c 						       priv->RcvCtl[ctx].dma,
dma              1063 drivers/message/fusion/mptlan.c 					    priv->RcvCtl[ctx].dma,
dma              1070 drivers/message/fusion/mptlan.c 					       priv->RcvCtl[ctx].dma,
dma              1083 drivers/message/fusion/mptlan.c 		pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
dma              1085 drivers/message/fusion/mptlan.c 		priv->RcvCtl[ctx].dma = 0;
dma              1148 drivers/message/fusion/mptlan.c 	dma_addr_t dma;
dma              1206 drivers/message/fusion/mptlan.c 						 priv->RcvCtl[ctx].dma,
dma              1224 drivers/message/fusion/mptlan.c 				dma = pci_map_single(mpt_dev->pcidev, skb->data,
dma              1228 drivers/message/fusion/mptlan.c 				priv->RcvCtl[ctx].dma = dma;
dma              1245 drivers/message/fusion/mptlan.c 			pSimple->Address.Low = cpu_to_le32((u32) priv->RcvCtl[ctx].dma);
dma              1247 drivers/message/fusion/mptlan.c 				pSimple->Address.High = cpu_to_le32((u32) ((u64) priv->RcvCtl[ctx].dma >> 32));
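
The mptlan lines use streaming mappings: each skb's data is mapped with the legacy pci_map_single() wrapper, the bus address is stored in a per-context slot, split into low/high 32-bit words for the message frame, synced around inspection and unmapped on completion. The same flow with the current dma_* API; the register offsets (MY_ADDR_LO/MY_ADDR_HI) are hypothetical:

#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/skbuff.h>

#define MY_ADDR_LO	0x00	/* hypothetical register offsets */
#define MY_ADDR_HI	0x04

static int my_map_skb(struct device *dev, struct sk_buff *skb, dma_addr_t *dma)
{
	*dma = dma_map_single(dev, skb->data, skb->len, DMA_TO_DEVICE);
	return dma_mapping_error(dev, *dma) ? -ENOMEM : 0;
}

static void my_post_skb(void __iomem *regs, dma_addr_t dma, bool is_64bit)
{
	/* the hardware takes the bus address as two 32-bit words */
	writel(lower_32_bits(dma), regs + MY_ADDR_LO);
	if (is_64bit)
		writel(upper_32_bits(dma), regs + MY_ADDR_HI);
}

static void my_tx_done(struct device *dev, struct sk_buff *skb, dma_addr_t dma)
{
	dma_unmap_single(dev, dma, skb->len, DMA_TO_DEVICE);
	dev_kfree_skb_any(skb);
}
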
dma                28 drivers/mfd/stm32-timers.c 	struct stm32_timers_dma *dma = p;
dma                32 drivers/mfd/stm32-timers.c 	status = dmaengine_tx_status(dma->chan, dma->chan->cookie, &state);
dma                34 drivers/mfd/stm32-timers.c 		complete(&dma->completion);
dma                57 drivers/mfd/stm32-timers.c 	struct stm32_timers_dma *dma = &ddata->dma;
dma                75 drivers/mfd/stm32-timers.c 	if (!dma->chans[id])
dma                77 drivers/mfd/stm32-timers.c 	mutex_lock(&dma->lock);
dma                80 drivers/mfd/stm32-timers.c 	dma->chan = dma->chans[id];
dma                89 drivers/mfd/stm32-timers.c 	config.src_addr = (dma_addr_t)dma->phys_base + TIM_DMAR;
dma                91 drivers/mfd/stm32-timers.c 	ret = dmaengine_slave_config(dma->chan, &config);
dma                95 drivers/mfd/stm32-timers.c 	desc = dmaengine_prep_slave_single(dma->chan, dma_buf, len,
dma               103 drivers/mfd/stm32-timers.c 	desc->callback_param = dma;
dma               109 drivers/mfd/stm32-timers.c 	reinit_completion(&dma->completion);
dma               110 drivers/mfd/stm32-timers.c 	dma_async_issue_pending(dma->chan);
dma               129 drivers/mfd/stm32-timers.c 	err = wait_for_completion_interruptible_timeout(&dma->completion,
dma               141 drivers/mfd/stm32-timers.c 	dmaengine_terminate_all(dma->chan);
dma               145 drivers/mfd/stm32-timers.c 	dma->chan = NULL;
dma               146 drivers/mfd/stm32-timers.c 	mutex_unlock(&dma->lock);
dma               176 drivers/mfd/stm32-timers.c 	init_completion(&ddata->dma.completion);
dma               177 drivers/mfd/stm32-timers.c 	mutex_init(&ddata->dma.lock);
dma               182 drivers/mfd/stm32-timers.c 		ddata->dma.chans[i] = dma_request_slave_channel(dev, name);
dma               184 drivers/mfd/stm32-timers.c 	ddata->dma.chans[STM32_TIMERS_DMA_UP] =
dma               186 drivers/mfd/stm32-timers.c 	ddata->dma.chans[STM32_TIMERS_DMA_TRIG] =
dma               188 drivers/mfd/stm32-timers.c 	ddata->dma.chans[STM32_TIMERS_DMA_COM] =
dma               198 drivers/mfd/stm32-timers.c 		if (ddata->dma.chans[i])
dma               199 drivers/mfd/stm32-timers.c 			dma_release_channel(ddata->dma.chans[i]);
dma               220 drivers/mfd/stm32-timers.c 	ddata->dma.phys_base = res->start;
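
The stm32-timers lines show the blocking dmaengine pattern: configure the slave channel with the register's bus address, prepare a single DEV_TO_MEM transfer into a bounce buffer, hook a completion callback, issue the transfer, wait with a timeout and terminate the channel on error. A condensed sketch, assuming x->done was set up with init_completion() at probe time:

#include <linux/completion.h>
#include <linux/dmaengine.h>

struct my_xfer {
	struct dma_chan		*chan;
	struct completion	done;
};

static void my_dma_done(void *arg)
{
	struct my_xfer *x = arg;

	complete(&x->done);
}

static int my_read_regs(struct my_xfer *x, dma_addr_t dst, size_t len,
			dma_addr_t src_reg)
{
	struct dma_slave_config cfg = {
		.src_addr = src_reg,
		.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
	};
	struct dma_async_tx_descriptor *desc;
	int ret;

	ret = dmaengine_slave_config(x->chan, &cfg);
	if (ret)
		return ret;

	desc = dmaengine_prep_slave_single(x->chan, dst, len, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT);
	if (!desc)
		return -EBUSY;

	desc->callback = my_dma_done;
	desc->callback_param = x;
	reinit_completion(&x->done);
	dmaengine_submit(desc);
	dma_async_issue_pending(x->chan);

	/* a device-specific trigger would start the transfer here */

	if (!wait_for_completion_timeout(&x->done, msecs_to_jiffies(1000))) {
		dmaengine_terminate_all(x->chan);
		return -ETIMEDOUT;
	}
	return 0;
}
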
dma                34 drivers/misc/cardreader/alcor_pci.c 	.dma = 0,
dma                38 drivers/misc/cardreader/alcor_pci.c 	.dma = 1,
dma               603 drivers/misc/mic/vop/vop_vringh.c 	bool dma = VOP_USE_DMA && vi->dma_ch;
dma               606 drivers/misc/mic/vop/vop_vringh.c 	if (dma) {
dma               612 drivers/misc/mic/vop/vop_vringh.c 			dma = false;
dma               615 drivers/misc/mic/vop/vop_vringh.c 			dma = false;
dma               619 drivers/misc/mic/vop/vop_vringh.c 	if (!dma)
dma              1022 drivers/mmc/host/alcor.c 	alcor_write8(priv, cfg->dma, AU6601_DMA_BOUNDARY);
dma               320 drivers/mmc/host/atmel-mci.c 	struct atmel_mci_dma	dma;
dma               937 drivers/mmc/host/atmel-mci.c 		dma_unmap_sg(host->dma.chan->device->dev,
dma              1117 drivers/mmc/host/atmel-mci.c 	chan = host->dma.chan;
dma              1148 drivers/mmc/host/atmel-mci.c 	host->dma.data_desc = desc;
dma              1181 drivers/mmc/host/atmel-mci.c 	struct dma_async_tx_descriptor	*desc = host->dma.data_desc;
dma              2350 drivers/mmc/host/atmel-mci.c 	host->dma.chan = dma_request_slave_channel_reason(&host->pdev->dev,
dma              2353 drivers/mmc/host/atmel-mci.c 	if (PTR_ERR(host->dma.chan) == -ENODEV) {
dma              2363 drivers/mmc/host/atmel-mci.c 		host->dma.chan = dma_request_channel(mask, pdata->dma_filter,
dma              2365 drivers/mmc/host/atmel-mci.c 		if (!host->dma.chan)
dma              2366 drivers/mmc/host/atmel-mci.c 			host->dma.chan = ERR_PTR(-ENODEV);
dma              2369 drivers/mmc/host/atmel-mci.c 	if (IS_ERR(host->dma.chan))
dma              2370 drivers/mmc/host/atmel-mci.c 		return PTR_ERR(host->dma.chan);
dma              2373 drivers/mmc/host/atmel-mci.c 		 dma_chan_name(host->dma.chan));
dma              2592 drivers/mmc/host/atmel-mci.c 	if (!IS_ERR(host->dma.chan))
dma              2593 drivers/mmc/host/atmel-mci.c 		dma_release_channel(host->dma.chan);
dma              2620 drivers/mmc/host/atmel-mci.c 	if (!IS_ERR(host->dma.chan))
dma              2621 drivers/mmc/host/atmel-mci.c 		dma_release_channel(host->dma.chan);
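
The atmel-mci lines request the slave channel by name first (the *_reason variant returns an ERR_PTR so -EPROBE_DEFER can be propagated) and only fall back to a filter-based dma_request_channel() when firmware describes no channel. A sketch of that request-with-fallback using dma_request_chan(), which also returns an ERR_PTR; the channel name and filter are placeholders:

#include <linux/dmaengine.h>

static struct dma_chan *my_get_chan(struct device *dev,
				    dma_filter_fn filter, void *filter_param)
{
	struct dma_chan *chan;
	dma_cap_mask_t mask;

	chan = dma_request_chan(dev, "rx-tx");	/* name from the DT binding */
	if (!IS_ERR(chan) || PTR_ERR(chan) == -EPROBE_DEFER)
		return chan;			/* got one, or retry probe */

	/* no firmware-described channel: fall back to a filter match */
	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	chan = dma_request_channel(mask, filter, filter_param);
	return chan ?: ERR_PTR(-ENODEV);
}
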
dma               103 drivers/mmc/host/au1xmmc.c 	} dma;
dma               244 drivers/mmc/host/au1xmmc.c 	host->dma.len = 0;
dma               245 drivers/mmc/host/au1xmmc.c 	host->dma.dir = 0;
dma               352 drivers/mmc/host/au1xmmc.c 	dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma.dir);
dma               463 drivers/mmc/host/au1xmmc.c 	if (host->pio.index < host->dma.len) {
dma               630 drivers/mmc/host/au1xmmc.c 	host->dma.dir = DMA_BIDIRECTIONAL;
dma               632 drivers/mmc/host/au1xmmc.c 	host->dma.len = dma_map_sg(mmc_dev(host->mmc), data->sg,
dma               633 drivers/mmc/host/au1xmmc.c 				   data->sg_len, host->dma.dir);
dma               635 drivers/mmc/host/au1xmmc.c 	if (host->dma.len == 0)
dma               646 drivers/mmc/host/au1xmmc.c 		for (i = 0; i < host->dma.len; i++) {
dma               653 drivers/mmc/host/au1xmmc.c 			if (i == host->dma.len - 1)
dma               685 drivers/mmc/host/au1xmmc.c 			host->dma.dir);
dma               564 drivers/mmc/host/mmc_spi.c 	dma_addr_t		dma = host->data_dma;
dma               567 drivers/mmc/host/mmc_spi.c 	if (dma)
dma               582 drivers/mmc/host/mmc_spi.c 		if (dma)
dma               583 drivers/mmc/host/mmc_spi.c 			t->tx_dma = dma + offsetof(struct scratch, data_token);
dma               603 drivers/mmc/host/mmc_spi.c 		if (dma)
dma               604 drivers/mmc/host/mmc_spi.c 			t->tx_dma = dma + offsetof(struct scratch, crc_val);
dma               609 drivers/mmc/host/mmc_spi.c 		if (dma)
dma               610 drivers/mmc/host/mmc_spi.c 			t->rx_dma = dma + offsetof(struct scratch, crc_val);
dma               635 drivers/mmc/host/mmc_spi.c 		if (dma)
dma               636 drivers/mmc/host/mmc_spi.c 			t->rx_dma = dma + offsetof(struct scratch, status);
dma               404 drivers/mmc/host/mtk-sd.c 	struct msdc_dma dma;	/* dma channel */
dma               611 drivers/mmc/host/mtk-sd.c static inline void msdc_dma_setup(struct msdc_host *host, struct msdc_dma *dma,
dma               623 drivers/mmc/host/mtk-sd.c 	gpd = dma->gpd;
dma               624 drivers/mmc/host/mtk-sd.c 	bd = dma->bd;
dma               673 drivers/mmc/host/mtk-sd.c 			      upper_32_bits(dma->gpd_addr) & 0xf);
dma               674 drivers/mmc/host/mtk-sd.c 	writel(lower_32_bits(dma->gpd_addr), host->base + MSDC_DMA_SA);
dma               977 drivers/mmc/host/mtk-sd.c 	msdc_dma_setup(host, &host->dma, data);
dma              1623 drivers/mmc/host/mtk-sd.c static void msdc_init_gpd_bd(struct msdc_host *host, struct msdc_dma *dma)
dma              1625 drivers/mmc/host/mtk-sd.c 	struct mt_gpdma_desc *gpd = dma->gpd;
dma              1626 drivers/mmc/host/mtk-sd.c 	struct mt_bdma_desc *bd = dma->bd;
dma              1632 drivers/mmc/host/mtk-sd.c 	dma_addr = dma->gpd_addr + sizeof(struct mt_gpdma_desc);
dma              1641 drivers/mmc/host/mtk-sd.c 	dma_addr = dma->bd_addr;
dma              1642 drivers/mmc/host/mtk-sd.c 	gpd->ptr = lower_32_bits(dma->bd_addr); /* physical address */
dma              1648 drivers/mmc/host/mtk-sd.c 		dma_addr = dma->bd_addr + sizeof(*bd) * (i + 1);
dma              2305 drivers/mmc/host/mtk-sd.c 	host->dma.gpd = dma_alloc_coherent(&pdev->dev,
dma              2307 drivers/mmc/host/mtk-sd.c 				&host->dma.gpd_addr, GFP_KERNEL);
dma              2308 drivers/mmc/host/mtk-sd.c 	host->dma.bd = dma_alloc_coherent(&pdev->dev,
dma              2310 drivers/mmc/host/mtk-sd.c 				&host->dma.bd_addr, GFP_KERNEL);
dma              2311 drivers/mmc/host/mtk-sd.c 	if (!host->dma.gpd || !host->dma.bd) {
dma              2315 drivers/mmc/host/mtk-sd.c 	msdc_init_gpd_bd(host, &host->dma);
dma              2345 drivers/mmc/host/mtk-sd.c 	if (host->dma.gpd)
dma              2348 drivers/mmc/host/mtk-sd.c 			host->dma.gpd, host->dma.gpd_addr);
dma              2349 drivers/mmc/host/mtk-sd.c 	if (host->dma.bd)
dma              2352 drivers/mmc/host/mtk-sd.c 			host->dma.bd, host->dma.bd_addr);
dma              2378 drivers/mmc/host/mtk-sd.c 			host->dma.gpd, host->dma.gpd_addr);
dma              2380 drivers/mmc/host/mtk-sd.c 			host->dma.bd, host->dma.bd_addr);
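
The mtk-sd lines build two coherent descriptor arrays (gpd/bd) and link them by writing each element's bus address, i.e. the base dma_addr plus index * sizeof(descriptor), into the previous element's next pointer. A minimal sketch of linking a coherent ring that way; the descriptor layout is entirely hypothetical:

#include <linux/dma-mapping.h>

struct my_desc {
	__le32	next_lo;	/* bus address of the next descriptor */
	__le32	flags;
};

static int my_ring_init(struct device *dev, unsigned int count,
			struct my_desc **ring, dma_addr_t *ring_dma)
{
	struct my_desc *d;
	dma_addr_t base;
	unsigned int i;

	d = dma_alloc_coherent(dev, count * sizeof(*d), &base, GFP_KERNEL);
	if (!d)
		return -ENOMEM;

	for (i = 0; i < count; i++) {
		/* every element sits at a fixed offset from the base */
		dma_addr_t next = base + sizeof(*d) * ((i + 1) % count);

		d[i].next_lo = cpu_to_le32(lower_32_bits(next));
	}

	*ring = d;
	*ring_dma = base;
	return 0;
}
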
dma               125 drivers/mmc/host/mxcmmc.c 	struct dma_chan		*dma;
dma               332 drivers/mmc/host/mxcmmc.c 	nents = dma_map_sg(host->dma->device->dev, data->sg,
dma               337 drivers/mmc/host/mxcmmc.c 	host->desc = dmaengine_prep_slave_sg(host->dma,
dma               342 drivers/mmc/host/mxcmmc.c 		dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len,
dma               350 drivers/mmc/host/mxcmmc.c 	dma_async_issue_pending(host->dma);
dma               452 drivers/mmc/host/mxcmmc.c 		dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len,
dma               766 drivers/mmc/host/mxcmmc.c 	if (host->dma)
dma               836 drivers/mmc/host/mxcmmc.c 	return dmaengine_slave_config(host->dma, config);
dma               859 drivers/mmc/host/mxcmmc.c 			dma_release_channel(host->dma);
dma               861 drivers/mmc/host/mxcmmc.c 			host->dma = NULL;
dma               967 drivers/mmc/host/mxcmmc.c 		dmaengine_terminate_all(host->dma);
dma              1124 drivers/mmc/host/mxcmmc.c 		host->dma = dma_request_slave_channel(&pdev->dev, "rx-tx");
dma              1134 drivers/mmc/host/mxcmmc.c 			host->dma = dma_request_channel(mask, filter, host);
dma              1137 drivers/mmc/host/mxcmmc.c 	if (host->dma)
dma              1139 drivers/mmc/host/mxcmmc.c 				host->dma->device->dev);
dma              1166 drivers/mmc/host/mxcmmc.c 	if (host->dma)
dma              1167 drivers/mmc/host/mxcmmc.c 		dma_release_channel(host->dma);
dma              1190 drivers/mmc/host/mxcmmc.c 	if (host->dma)
dma              1191 drivers/mmc/host/mxcmmc.c 		dma_release_channel(host->dma);
dma               899 drivers/mmc/host/s3cmci.c 			dmaengine_terminate_all(host->dma);
dma              1094 drivers/mmc/host/s3cmci.c 	dmaengine_slave_config(host->dma, &conf);
dma              1095 drivers/mmc/host/s3cmci.c 	desc = dmaengine_prep_slave_sg(host->dma, data->sg, data->sg_len,
dma              1103 drivers/mmc/host/s3cmci.c 	dma_async_issue_pending(host->dma);
dma              1637 drivers/mmc/host/s3cmci.c 		host->dma = dma_request_chan(&pdev->dev, "rx-tx");
dma              1638 drivers/mmc/host/s3cmci.c 		ret = PTR_ERR_OR_ZERO(host->dma);
dma              1684 drivers/mmc/host/s3cmci.c 	    host->base, host->irq, host->irq_cd, host->dma);
dma              1718 drivers/mmc/host/s3cmci.c 		dma_release_channel(host->dma);
dma              1768 drivers/mmc/host/s3cmci.c 		dma_release_channel(host->dma);
dma                26 drivers/mmc/host/s3cmci.h 	struct dma_chan		*dma;
dma              2878 drivers/mmc/host/sdhci.c 	dma_addr_t dma = host->adma_addr;
dma              2887 drivers/mmc/host/sdhci.c 			    (unsigned long long)dma,
dma              2894 drivers/mmc/host/sdhci.c 			    (unsigned long long)dma,
dma              2900 drivers/mmc/host/sdhci.c 		dma += host->desc_sz;
dma              3814 drivers/mmc/host/sdhci.c 		dma_addr_t dma;
dma              3834 drivers/mmc/host/sdhci.c 					 &dma, GFP_KERNEL);
dma              3839 drivers/mmc/host/sdhci.c 		} else if ((dma + host->align_buffer_sz) &
dma              3845 drivers/mmc/host/sdhci.c 					  host->adma_table_sz, buf, dma);
dma              3848 drivers/mmc/host/sdhci.c 			host->align_addr = dma;
dma              3851 drivers/mmc/host/sdhci.c 			host->adma_addr = dma + host->align_buffer_sz;
dma                30 drivers/mmc/host/sdhci.h #define  SDHCI_MAKE_BLKSZ(dma, blksz) (((dma & 0x7) << 12) | (blksz & 0xFFF))
dma               206 drivers/mmc/host/wbsd.c 	if (host->dma >= 0) {
dma               211 drivers/mmc/host/wbsd.c 		disable_dma(host->dma);
dma               212 drivers/mmc/host/wbsd.c 		clear_dma_ff(host->dma);
dma               592 drivers/mmc/host/wbsd.c 	if (host->dma >= 0) {
dma               613 drivers/mmc/host/wbsd.c 		disable_dma(host->dma);
dma               614 drivers/mmc/host/wbsd.c 		clear_dma_ff(host->dma);
dma               616 drivers/mmc/host/wbsd.c 			set_dma_mode(host->dma, DMA_MODE_READ & ~0x40);
dma               618 drivers/mmc/host/wbsd.c 			set_dma_mode(host->dma, DMA_MODE_WRITE & ~0x40);
dma               619 drivers/mmc/host/wbsd.c 		set_dma_addr(host->dma, host->dma_addr);
dma               620 drivers/mmc/host/wbsd.c 		set_dma_count(host->dma, size);
dma               622 drivers/mmc/host/wbsd.c 		enable_dma(host->dma);
dma               688 drivers/mmc/host/wbsd.c 	if (host->dma >= 0) {
dma               698 drivers/mmc/host/wbsd.c 		disable_dma(host->dma);
dma               699 drivers/mmc/host/wbsd.c 		clear_dma_ff(host->dma);
dma               700 drivers/mmc/host/wbsd.c 		count = get_dma_residue(host->dma);
dma               821 drivers/mmc/host/wbsd.c 		if (host->dma == -1)
dma              1198 drivers/mmc/host/wbsd.c 	host->dma = -1;
dma              1359 drivers/mmc/host/wbsd.c static void wbsd_request_dma(struct wbsd_host *host, int dma)
dma              1361 drivers/mmc/host/wbsd.c 	if (dma < 0)
dma              1364 drivers/mmc/host/wbsd.c 	if (request_dma(dma, DRIVER_NAME))
dma              1395 drivers/mmc/host/wbsd.c 	host->dma = dma;
dma              1414 drivers/mmc/host/wbsd.c 	free_dma(dma);
dma              1418 drivers/mmc/host/wbsd.c 		dma);
dma              1431 drivers/mmc/host/wbsd.c 	if (host->dma >= 0)
dma              1432 drivers/mmc/host/wbsd.c 		free_dma(host->dma);
dma              1434 drivers/mmc/host/wbsd.c 	host->dma = -1;
dma              1494 drivers/mmc/host/wbsd.c 	int base, int irq, int dma)
dma              1515 drivers/mmc/host/wbsd.c 	wbsd_request_dma(host, dma);
dma              1563 drivers/mmc/host/wbsd.c 	if (host->dma >= 0)
dma              1564 drivers/mmc/host/wbsd.c 		wbsd_write_config(host, WBSD_CONF_DRQ, host->dma);
dma              1581 drivers/mmc/host/wbsd.c 	int base, irq, dma;
dma              1598 drivers/mmc/host/wbsd.c 	dma = wbsd_read_config(host, WBSD_CONF_DRQ);
dma              1609 drivers/mmc/host/wbsd.c 	if ((dma != host->dma) && (host->dma != -1))
dma              1635 drivers/mmc/host/wbsd.c static int wbsd_init(struct device *dev, int base, int irq, int dma,
dma              1665 drivers/mmc/host/wbsd.c 	ret = wbsd_request_resources(host, base, irq, dma);
dma              1710 drivers/mmc/host/wbsd.c 	if (host->dma >= 0)
dma              1711 drivers/mmc/host/wbsd.c 		printk(" dma %d", (int)host->dma);
dma              1770 drivers/mmc/host/wbsd.c 	int io, irq, dma;
dma              1778 drivers/mmc/host/wbsd.c 		dma = pnp_dma(pnpdev, 0);
dma              1780 drivers/mmc/host/wbsd.c 		dma = -1;
dma              1782 drivers/mmc/host/wbsd.c 	DBGF("PnP resources: port %3x irq %d dma %d\n", io, irq, dma);
dma              1784 drivers/mmc/host/wbsd.c 	return wbsd_init(&pnpdev->dev, io, irq, dma, 1);
dma              1991 drivers/mmc/host/wbsd.c module_param_hw_named(dma, param_dma, int, dma, 0444);
dma              2002 drivers/mmc/host/wbsd.c MODULE_PARM_DESC(dma, "DMA channel to allocate. -1 for no DMA. (default 2)");
dma               172 drivers/mmc/host/wbsd.h 	int			dma;		/* DMA channel */
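The wbsd.c entries above use the legacy ISA DMA controller API from <asm/dma.h>: request_dma()/free_dma() to own a channel, then a disable_dma(), clear_dma_ff(), set_dma_mode(), set_dma_addr(), set_dma_count(), enable_dma() sequence per transfer, with get_dma_residue() reporting how much was left untransferred. A hedged sketch of that sequence is below; the channel, buffer and name are placeholders, the buffer must live in ISA-reachable memory, and the claim_dma_lock()/release_dma_lock() pair is available on architectures that implement the ISA DMA API.

/*
 * Sketch of the legacy ISA DMA sequence seen in the wbsd entries above.
 * Placeholder names; real drivers keep the channel claimed for the life
 * of the device rather than per transfer.
 */
#include <asm/dma.h>
#include <linux/errno.h>
#include <linux/io.h>

static int foo_isa_dma_start(int dma, void *buf, size_t len, bool to_device)
{
	unsigned long flags;

	if (request_dma(dma, "foo"))		/* claim the channel */
		return -EBUSY;

	flags = claim_dma_lock();		/* serialise 8237 access */
	disable_dma(dma);
	clear_dma_ff(dma);			/* reset the address flip-flop */
	set_dma_mode(dma, to_device ? DMA_MODE_WRITE : DMA_MODE_READ);
	set_dma_addr(dma, isa_virt_to_bus(buf));
	set_dma_count(dma, len);
	enable_dma(dma);
	release_dma_lock(flags);

	return 0;
}

static int foo_isa_dma_finish(int dma)
{
	unsigned long flags;
	int residue;

	flags = claim_dma_lock();
	disable_dma(dma);
	clear_dma_ff(dma);
	residue = get_dma_residue(dma);		/* bytes not transferred */
	release_dma_lock(flags);

	free_dma(dma);
	return residue;
}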
dma               154 drivers/mtd/nand/raw/atmel/nand-controller.c 		dma_addr_t dma;
dma               256 drivers/mtd/nand/raw/atmel/nand-controller.c 		dma_addr_t dma;
dma               448 drivers/mtd/nand/raw/atmel/nand-controller.c 	    !atmel_nand_dma_transfer(nc, buf, nand->activecs->io.dma, len,
dma               472 drivers/mtd/nand/raw/atmel/nand-controller.c 	    !atmel_nand_dma_transfer(nc, (void *)buf, nand->activecs->io.dma,
dma               657 drivers/mtd/nand/raw/atmel/nand-controller.c 					      nc->sram.dma, mtd->writesize,
dma               679 drivers/mtd/nand/raw/atmel/nand-controller.c 		ret = atmel_nand_dma_transfer(&nc->base, buf, nc->sram.dma,
dma              1615 drivers/mtd/nand/raw/atmel/nand-controller.c 		nand->cs[i].io.dma = res.start;
dma              1737 drivers/mtd/nand/raw/atmel/nand-controller.c 	nand->cs[0].io.dma = res->start;
dma              2174 drivers/mtd/nand/raw/atmel/nand-controller.c 	nc->sram.dma = res.start;
dma              2230 drivers/mtd/nand/raw/atmel/nand-controller.c 							   &nc->sram.dma);
dma               427 drivers/mtd/nand/raw/lpc32xx_slc.c static int lpc32xx_xmit_dma(struct mtd_info *mtd, dma_addr_t dma,
dma               437 drivers/mtd/nand/raw/lpc32xx_slc.c 	host->dma_slave_config.src_addr = dma;
dma               438 drivers/mtd/nand/raw/lpc32xx_slc.c 	host->dma_slave_config.dst_addr = dma;
dma               157 drivers/mtd/nand/raw/omap2.c 	struct dma_chan			*dma;
dma               471 drivers/mtd/nand/raw/omap2.c 	n = dma_map_sg(info->dma->device->dev, &sg, 1, dir);
dma               478 drivers/mtd/nand/raw/omap2.c 	tx = dmaengine_prep_slave_sg(info->dma, &sg, n,
dma               491 drivers/mtd/nand/raw/omap2.c 	dma_async_issue_pending(info->dma);
dma               513 drivers/mtd/nand/raw/omap2.c 	dma_unmap_sg(info->dma->device->dev, &sg, 1, dir);
dma               517 drivers/mtd/nand/raw/omap2.c 	dma_unmap_sg(info->dma->device->dev, &sg, 1, dir);
dma              1941 drivers/mtd/nand/raw/omap2.c 		info->dma = dma_request_chan(dev->parent, "rxtx");
dma              1943 drivers/mtd/nand/raw/omap2.c 		if (IS_ERR(info->dma)) {
dma              1945 drivers/mtd/nand/raw/omap2.c 			return PTR_ERR(info->dma);
dma              1956 drivers/mtd/nand/raw/omap2.c 			err = dmaengine_slave_config(info->dma, &cfg);
dma              2276 drivers/mtd/nand/raw/omap2.c 	if (!IS_ERR_OR_NULL(info->dma))
dma              2277 drivers/mtd/nand/raw/omap2.c 		dma_release_channel(info->dma);
dma              2294 drivers/mtd/nand/raw/omap2.c 	if (info->dma)
dma              2295 drivers/mtd/nand/raw/omap2.c 		dma_release_channel(info->dma);
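The omap2.c entries add the mapping side of the dmaengine flow: the buffer is wrapped in a scatterlist, handed to the device with dma_map_sg(), and released with dma_unmap_sg() after completion or on failure. A sketch of such a read path is below; "foo_info", the completion-based wait and the timeout are invented, and the buffer is assumed to be DMA-able (e.g. kmalloc'd).

/*
 * Sketch of the map -> prep -> issue -> wait -> unmap path seen in the
 * omap2 NAND entries above.  Placeholder names; error handling trimmed.
 */
#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/jiffies.h>
#include <linux/scatterlist.h>

struct foo_info {
	struct dma_chan *dma;
	struct completion done;
};

static void foo_dma_callback(void *data)
{
	complete(data);		/* signalled from the DMA completion */
}

static int foo_dma_read(struct foo_info *info, void *buf, unsigned int len)
{
	struct device *dev = info->dma->device->dev;
	struct dma_async_tx_descriptor *tx;
	struct scatterlist sg;
	int n, ret = 0;

	sg_init_one(&sg, buf, len);
	n = dma_map_sg(dev, &sg, 1, DMA_FROM_DEVICE);
	if (!n)
		return -EIO;

	tx = dmaengine_prep_slave_sg(info->dma, &sg, n, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		ret = -EIO;
		goto out_unmap;
	}

	init_completion(&info->done);
	tx->callback = foo_dma_callback;
	tx->callback_param = &info->done;

	dmaengine_submit(tx);
	dma_async_issue_pending(info->dma);

	if (!wait_for_completion_timeout(&info->done, msecs_to_jiffies(1000)))
		ret = -ETIMEDOUT;

out_unmap:
	dma_unmap_sg(dev, &sg, 1, DMA_FROM_DEVICE);
	return ret;
}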
dma               251 drivers/mtd/nand/raw/tegra_nand.c 	u32 isr, dma;
dma               254 drivers/mtd/nand/raw/tegra_nand.c 	dma = readl_relaxed(ctrl->regs + DMA_MST_CTRL);
dma               257 drivers/mtd/nand/raw/tegra_nand.c 	if (!isr && !(dma & DMA_MST_CTRL_IS_DONE))
dma               278 drivers/mtd/nand/raw/tegra_nand.c 	if (dma & DMA_MST_CTRL_IS_DONE) {
dma               279 drivers/mtd/nand/raw/tegra_nand.c 		writel_relaxed(dma, ctrl->regs + DMA_MST_CTRL);
dma               327 drivers/mtd/nand/raw/tegra_nand.c 	u32 isr, dma;
dma               338 drivers/mtd/nand/raw/tegra_nand.c 	dma = readl_relaxed(ctrl->regs + DMA_MST_CTRL);
dma               339 drivers/mtd/nand/raw/tegra_nand.c 	writel_relaxed(dma, ctrl->regs + DMA_MST_CTRL);
dma               207 drivers/net/appletalk/ltpc.c static int dma;
dma               371 drivers/net/appletalk/ltpc.c 	int dma = dev->dma;
dma               377 drivers/net/appletalk/ltpc.c 	disable_dma(dma);
dma               378 drivers/net/appletalk/ltpc.c 	clear_dma_ff(dma);
dma               379 drivers/net/appletalk/ltpc.c 	set_dma_mode(dma,DMA_MODE_READ);
dma               380 drivers/net/appletalk/ltpc.c 	set_dma_addr(dma,virt_to_bus(ltdmacbuf));
dma               381 drivers/net/appletalk/ltpc.c 	set_dma_count(dma,50);
dma               382 drivers/net/appletalk/ltpc.c 	enable_dma(dma);
dma               394 drivers/net/appletalk/ltpc.c 	int dma = dev->dma;
dma               399 drivers/net/appletalk/ltpc.c 	disable_dma(dma);
dma               400 drivers/net/appletalk/ltpc.c 	clear_dma_ff(dma);
dma               401 drivers/net/appletalk/ltpc.c 	set_dma_mode(dma,DMA_MODE_READ);
dma               402 drivers/net/appletalk/ltpc.c 	set_dma_addr(dma,virt_to_bus(ltdmabuf));
dma               403 drivers/net/appletalk/ltpc.c 	set_dma_count(dma,800);
dma               404 drivers/net/appletalk/ltpc.c 	enable_dma(dma);
dma               418 drivers/net/appletalk/ltpc.c 	int dma = dev->dma;
dma               423 drivers/net/appletalk/ltpc.c 	disable_dma(dma);
dma               424 drivers/net/appletalk/ltpc.c 	clear_dma_ff(dma);
dma               425 drivers/net/appletalk/ltpc.c 	set_dma_mode(dma,DMA_MODE_WRITE);
dma               426 drivers/net/appletalk/ltpc.c 	set_dma_addr(dma,virt_to_bus(ltdmabuf));
dma               427 drivers/net/appletalk/ltpc.c 	set_dma_count(dma,800);
dma               428 drivers/net/appletalk/ltpc.c 	enable_dma(dma);
dma               437 drivers/net/appletalk/ltpc.c 			get_dma_residue(dev->dma) );
dma               446 drivers/net/appletalk/ltpc.c 	int dma = dev->dma;
dma               452 drivers/net/appletalk/ltpc.c 	disable_dma(dma);
dma               453 drivers/net/appletalk/ltpc.c 	clear_dma_ff(dma);
dma               454 drivers/net/appletalk/ltpc.c 	set_dma_mode(dma,DMA_MODE_READ);
dma               455 drivers/net/appletalk/ltpc.c 	set_dma_addr(dma,virt_to_bus(ltdmabuf));
dma               456 drivers/net/appletalk/ltpc.c 	set_dma_count(dma,800);
dma               457 drivers/net/appletalk/ltpc.c 	enable_dma(dma);
dma               468 drivers/net/appletalk/ltpc.c 	int dma = dev->dma;
dma               473 drivers/net/appletalk/ltpc.c 	disable_dma(dma);
dma               474 drivers/net/appletalk/ltpc.c 	clear_dma_ff(dma);
dma               475 drivers/net/appletalk/ltpc.c 	set_dma_mode(dma,DMA_MODE_WRITE);
dma               476 drivers/net/appletalk/ltpc.c 	set_dma_addr(dma,virt_to_bus(ltdmacbuf));
dma               477 drivers/net/appletalk/ltpc.c 	set_dma_count(dma,50);
dma               478 drivers/net/appletalk/ltpc.c 	enable_dma(dma);
dma               933 drivers/net/appletalk/ltpc.c static int __init ltpc_probe_dma(int base, int dma)
dma               935 drivers/net/appletalk/ltpc.c 	int want = (dma == 3) ? 2 : (dma == 1) ? 1 : 3;
dma              1107 drivers/net/appletalk/ltpc.c 	dma = ltpc_probe_dma(io, dma);
dma              1108 drivers/net/appletalk/ltpc.c 	if (!dma) {  /* no dma channel */
dma              1116 drivers/net/appletalk/ltpc.c 		printk(KERN_INFO "Apple/Farallon LocalTalk-PC card at %03x, IR%d, DMA%d.\n",io,irq,dma);
dma              1118 drivers/net/appletalk/ltpc.c 		printk(KERN_INFO "Apple/Farallon LocalTalk-PC card at %03x, DMA%d.  Using polled mode.\n",io,dma);
dma              1123 drivers/net/appletalk/ltpc.c 	dev->dma = dma;
dma              1130 drivers/net/appletalk/ltpc.c 	disable_dma(dma);
dma              1131 drivers/net/appletalk/ltpc.c 	clear_dma_ff(dma);
dma              1132 drivers/net/appletalk/ltpc.c 	set_dma_mode(dma,DMA_MODE_READ);
dma              1133 drivers/net/appletalk/ltpc.c 	set_dma_addr(dma,virt_to_bus(ltdmabuf));
dma              1134 drivers/net/appletalk/ltpc.c 	set_dma_count(dma,0x100);
dma              1135 drivers/net/appletalk/ltpc.c 	enable_dma(dma);
dma              1212 drivers/net/appletalk/ltpc.c 			dma = ints[3];
dma              1230 drivers/net/appletalk/ltpc.c module_param_hw(dma, int, dma, 0);
dma              1260 drivers/net/appletalk/ltpc.c 	if (dev_ltpc->dma)
dma              1261 drivers/net/appletalk/ltpc.c 		free_dma(dev_ltpc->dma);
dma               255 drivers/net/can/grcan.c 	struct grcan_dma dma;
dma               501 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma               527 drivers/net/can/grcan.c 					     dma->tx.size);
dma               537 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma               558 drivers/net/can/grcan.c 		txrd = grcan_ring_add(txrd, GRCAN_MSG_SIZE, dma->tx.size);
dma               576 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma               695 drivers/net/can/grcan.c 				if (grcan_txspace(dma->tx.size, txwr,
dma               863 drivers/net/can/grcan.c 		if (grcan_txspace(priv->dma.tx.size, txwr, priv->eskbp) &&
dma               925 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma               927 drivers/net/can/grcan.c 	dma_free_coherent(&dev->dev, dma->base_size, dma->base_buf,
dma               928 drivers/net/can/grcan.c 			  dma->base_handle);
dma               929 drivers/net/can/grcan.c 	memset(dma, 0, sizeof(*dma));
dma               936 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma               937 drivers/net/can/grcan.c 	struct grcan_dma_buffer *large = rsize > tsize ? &dma->rx : &dma->tx;
dma               938 drivers/net/can/grcan.c 	struct grcan_dma_buffer *small = rsize > tsize ? &dma->tx : &dma->rx;
dma               951 drivers/net/can/grcan.c 	dma->base_size = lsize + ssize + GRCAN_BUFFER_ALIGNMENT;
dma               952 drivers/net/can/grcan.c 	dma->base_buf = dma_alloc_coherent(&dev->dev,
dma               953 drivers/net/can/grcan.c 					   dma->base_size,
dma               954 drivers/net/can/grcan.c 					   &dma->base_handle,
dma               957 drivers/net/can/grcan.c 	if (!dma->base_buf)
dma               960 drivers/net/can/grcan.c 	dma->tx.size = tsize;
dma               961 drivers/net/can/grcan.c 	dma->rx.size = rsize;
dma               963 drivers/net/can/grcan.c 	large->handle = ALIGN(dma->base_handle, GRCAN_BUFFER_ALIGNMENT);
dma               965 drivers/net/can/grcan.c 	shift = large->handle - dma->base_handle;
dma               967 drivers/net/can/grcan.c 	large->buf = dma->base_buf + shift;
dma               982 drivers/net/can/grcan.c 	grcan_write_reg(&regs->txaddr, priv->dma.tx.handle);
dma               983 drivers/net/can/grcan.c 	grcan_write_reg(&regs->txsize, priv->dma.tx.size);
dma               986 drivers/net/can/grcan.c 	grcan_write_reg(&regs->rxaddr, priv->dma.rx.handle);
dma               987 drivers/net/can/grcan.c 	grcan_write_reg(&regs->rxsize, priv->dma.rx.size);
dma              1044 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma              1056 drivers/net/can/grcan.c 	priv->echo_skb = kcalloc(dma->tx.size, sizeof(*priv->echo_skb),
dma              1062 drivers/net/can/grcan.c 	priv->can.echo_skb_max = dma->tx.size;
dma              1065 drivers/net/can/grcan.c 	priv->txdlc = kcalloc(dma->tx.size, sizeof(*priv->txdlc), GFP_KERNEL);
dma              1167 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma              1193 drivers/net/can/grcan.c 		slot = dma->rx.buf + rd;
dma              1221 drivers/net/can/grcan.c 		rd = grcan_ring_add(rd, GRCAN_MSG_SIZE, dma->rx.size);
dma              1286 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma              1307 drivers/net/can/grcan.c 		if (grcan_txspace(dma->tx.size, txwr, priv->eskbp))
dma              1353 drivers/net/can/grcan.c 	struct grcan_dma *dma = &priv->dma;
dma              1379 drivers/net/can/grcan.c 	space = grcan_txspace(dma->tx.size, txwr, priv->eskbp);
dma              1382 drivers/net/can/grcan.c 	slot = dma->tx.buf + txwr;
dma              1434 drivers/net/can/grcan.c 		if (unlikely(grcan_ring_sub(txwr, txrd, dma->tx.size) == 1)) {
dma              1460 drivers/net/can/grcan.c 			grcan_ring_add(txwr, GRCAN_MSG_SIZE, dma->tx.size));
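The grcan.c entries above over-allocate a single coherent block (base_size = rings plus GRCAN_BUFFER_ALIGNMENT of slack) and then use ALIGN() on the DMA handle to place the rings at a stricter boundary than dma_alloc_coherent() guarantees, shifting the CPU pointer by the same amount. A generic sketch of that trick, with made-up sizes and alignment, is shown below.

/*
 * Sketch of the "over-allocate and align by hand" pattern from the grcan
 * entries above.  FOO_ALIGN and the ring size are placeholders; the same
 * shift is applied to the CPU pointer and the DMA handle so they stay paired.
 */
#include <linux/dma-mapping.h>
#include <linux/kernel.h>
#include <linux/string.h>

#define FOO_ALIGN	1024	/* alignment the hardware wants */

struct foo_ring {
	void		*buf;		/* aligned CPU address */
	dma_addr_t	handle;		/* aligned device address */
	size_t		size;		/* usable ring size */

	void		*base_buf;	/* what dma_alloc_coherent() returned */
	dma_addr_t	base_handle;
	size_t		base_size;
};

static int foo_ring_alloc(struct device *dev, struct foo_ring *ring,
			  size_t size)
{
	size_t shift;

	ring->base_size = size + FOO_ALIGN;
	ring->base_buf = dma_alloc_coherent(dev, ring->base_size,
					    &ring->base_handle, GFP_KERNEL);
	if (!ring->base_buf)
		return -ENOMEM;

	/* Round the DMA handle up, move the CPU pointer by the same amount. */
	ring->handle = ALIGN(ring->base_handle, FOO_ALIGN);
	shift = ring->handle - ring->base_handle;
	ring->buf = ring->base_buf + shift;
	ring->size = size;

	return 0;
}

static void foo_ring_free(struct device *dev, struct foo_ring *ring)
{
	dma_free_coherent(dev, ring->base_size, ring->base_buf,
			  ring->base_handle);
	memset(ring, 0, sizeof(*ring));
}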
dma               476 drivers/net/ethernet/3com/3c515.c 	if (dev->dma)
dma               477 drivers/net/ethernet/3com/3c515.c 		free_dma(dev->dma);
dma               599 drivers/net/ethernet/3com/3c515.c 	dev->dma = inw(ioaddr + 0x2000) & 7;
dma               654 drivers/net/ethernet/3com/3c515.c 		if (request_dma(dev->dma, "3c515")) {
dma               655 drivers/net/ethernet/3com/3c515.c 			pr_cont(", DMA %d allocation failed", dev->dma);
dma               656 drivers/net/ethernet/3com/3c515.c 			dev->dma = 0;
dma               658 drivers/net/ethernet/3com/3c515.c 			pr_cont(", DMA %d", dev->dma);
dma               766 drivers/net/ethernet/3com/3c515.c 		    dev->dma == 0 ||
dma               770 drivers/net/ethernet/3com/3c515.c 		enable_dma(dev->dma);
dma               771 drivers/net/ethernet/3com/3c515.c 		set_dma_mode(dev->dma, DMA_MODE_CASCADE);
dma              1723 drivers/net/ethernet/3com/3c59x.c 	dma_addr_t dma;
dma              1747 drivers/net/ethernet/3com/3c59x.c 			dma = dma_map_single(vp->gendev, skb->data,
dma              1749 drivers/net/ethernet/3com/3c59x.c 			if (dma_mapping_error(vp->gendev, dma))
dma              1751 drivers/net/ethernet/3com/3c59x.c 			vp->rx_ring[i].addr = cpu_to_le32(dma);
dma              2555 drivers/net/ethernet/3com/3c59x.c 					dma_addr_t dma = dma_map_single(vp->gendev, skb_put(skb, pkt_len),
dma              2557 drivers/net/ethernet/3com/3c59x.c 					iowrite32(dma, ioaddr + Wn7_MasterAddr);
dma              2562 drivers/net/ethernet/3com/3c59x.c 					dma_unmap_single(vp->gendev, dma, pkt_len, DMA_FROM_DEVICE);
dma              2618 drivers/net/ethernet/3com/3c59x.c 			dma_addr_t dma = le32_to_cpu(vp->rx_ring[entry].addr);
dma              2629 drivers/net/ethernet/3com/3c59x.c 				dma_sync_single_for_cpu(vp->gendev, dma, PKT_BUF_SZ, DMA_FROM_DEVICE);
dma              2633 drivers/net/ethernet/3com/3c59x.c 				dma_sync_single_for_device(vp->gendev, dma, PKT_BUF_SZ, DMA_FROM_DEVICE);
dma              2658 drivers/net/ethernet/3com/3c59x.c 				dma_unmap_single(vp->gendev, dma, PKT_BUF_SZ, DMA_FROM_DEVICE);
dma               468 drivers/net/ethernet/amazon/ena/ena_netdev.c 	dma_addr_t dma;
dma               482 drivers/net/ethernet/amazon/ena/ena_netdev.c 	dma = dma_map_page(rx_ring->dev, page, 0, ENA_PAGE_SIZE,
dma               484 drivers/net/ethernet/amazon/ena/ena_netdev.c 	if (unlikely(dma_mapping_error(rx_ring->dev, dma))) {
dma               498 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ena_buf->paddr = dma;
dma              2147 drivers/net/ethernet/amazon/ena/ena_netdev.c 	dma_addr_t dma;
dma              2190 drivers/net/ethernet/amazon/ena/ena_netdev.c 		dma = dma_map_single(tx_ring->dev, skb->data + push_len,
dma              2192 drivers/net/ethernet/amazon/ena/ena_netdev.c 		if (unlikely(dma_mapping_error(tx_ring->dev, dma)))
dma              2195 drivers/net/ethernet/amazon/ena/ena_netdev.c 		ena_buf->paddr = dma;
dma              2217 drivers/net/ethernet/amazon/ena/ena_netdev.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, delta,
dma              2219 drivers/net/ethernet/amazon/ena/ena_netdev.c 		if (unlikely(dma_mapping_error(tx_ring->dev, dma)))
dma              2222 drivers/net/ethernet/amazon/ena/ena_netdev.c 		ena_buf->paddr = dma;
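The ena_netdev.c entries show streaming mappings on both paths: RX pages go through dma_map_page(), the linear part of a TX skb through dma_map_single(), each fragment through skb_frag_dma_map(), and every mapping is checked with dma_mapping_error() before the address reaches a descriptor. A condensed TX-side sketch follows; the descriptor layout is invented and the unwind of earlier mappings on failure is omitted but required in a real driver.

/*
 * Sketch of the TX mapping pattern from the ena entries above.
 * "foo_desc"/"foo_set_desc" stand in for the real descriptor format.
 */
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>
#include <linux/types.h>

struct foo_desc {
	__le64 addr;
	__le32 len;
};

static inline void foo_set_desc(struct foo_desc *desc, dma_addr_t addr, u32 len)
{
	desc->addr = cpu_to_le64(addr);
	desc->len = cpu_to_le32(len);
}

static int foo_map_skb(struct device *dev, struct sk_buff *skb,
		       struct foo_desc *descs)
{
	unsigned int len = skb_headlen(skb);
	dma_addr_t dma;
	int i;

	dma = dma_map_single(dev, skb->data, len, DMA_TO_DEVICE);
	if (unlikely(dma_mapping_error(dev, dma)))
		return -ENOMEM;
	foo_set_desc(&descs[0], dma, len);

	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

		dma = skb_frag_dma_map(dev, frag, 0, skb_frag_size(frag),
				       DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(dev, dma)))
			return -ENOMEM;	/* real code unmaps what it mapped */
		foo_set_desc(&descs[i + 1], dma, skb_frag_size(frag));
	}

	return skb_shinfo(skb)->nr_frags + 1;	/* descriptors used */
}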
dma               743 drivers/net/ethernet/amd/a2065.c 	dev->dma = 0;
dma              1247 drivers/net/ethernet/amd/declance.c 	dev->dma = 0;
dma               141 drivers/net/ethernet/amd/hplance.c 	dev->dma = 0;
dma               250 drivers/net/ethernet/amd/lance.c 	int dma;
dma               318 drivers/net/ethernet/amd/lance.c static int dma[MAX_CARDS];
dma               322 drivers/net/ethernet/amd/lance.c module_param_hw_array(dma, int, dma, NULL, 0);
dma               326 drivers/net/ethernet/amd/lance.c MODULE_PARM_DESC(dma, "LANCE/PCnet ISA DMA channel (ignored for some devices)");
dma               347 drivers/net/ethernet/amd/lance.c 		dev->dma = dma[this_dev];
dma               363 drivers/net/ethernet/amd/lance.c 	if (dev->dma != 4)
dma               364 drivers/net/ethernet/amd/lance.c 		free_dma(dev->dma);
dma               587 drivers/net/ethernet/amd/lance.c 		dev->dma = 4;			/* Native bus-master, no DMA channel needed. */
dma               593 drivers/net/ethernet/amd/lance.c 		dev->dma = dma_tbl[(port_val >> 4) & 3];
dma               595 drivers/net/ethernet/amd/lance.c 		printk(" HP Vectra IRQ %d DMA %d.\n", dev->irq, dev->dma);
dma               600 drivers/net/ethernet/amd/lance.c 		dev->dma = dma_tbl[(reset_val >> 2) & 3];
dma               602 drivers/net/ethernet/amd/lance.c 		printk(" HP J2405A IRQ %d DMA %d.\n", dev->irq, dev->dma);
dma               607 drivers/net/ethernet/amd/lance.c 		dev->dma = bus_info & 0x07;
dma               612 drivers/net/ethernet/amd/lance.c 			dev->dma = dev->mem_start & 0x07;
dma               615 drivers/net/ethernet/amd/lance.c 	if (dev->dma == 0) {
dma               647 drivers/net/ethernet/amd/lance.c 			dev->dma = 4;
dma               650 drivers/net/ethernet/amd/lance.c 	if (dev->dma == 4) {
dma               652 drivers/net/ethernet/amd/lance.c 	} else if (dev->dma) {
dma               653 drivers/net/ethernet/amd/lance.c 		if (request_dma(dev->dma, chipname)) {
dma               654 drivers/net/ethernet/amd/lance.c 			printk("DMA %d allocation failed.\n", dev->dma);
dma               657 drivers/net/ethernet/amd/lance.c 			printk(", assigned DMA %d.\n", dev->dma);
dma               661 drivers/net/ethernet/amd/lance.c 			int dma = dmas[i];
dma               666 drivers/net/ethernet/amd/lance.c 			if (test_bit(dma, &dma_channels))
dma               669 drivers/net/ethernet/amd/lance.c 			if (request_dma(dma, chipname))
dma               673 drivers/net/ethernet/amd/lance.c 			set_dma_mode(dma, DMA_MODE_CASCADE);
dma               674 drivers/net/ethernet/amd/lance.c 			enable_dma(dma);
dma               683 drivers/net/ethernet/amd/lance.c 				dev->dma = dma;
dma               684 drivers/net/ethernet/amd/lance.c 				printk(", DMA %d.\n", dev->dma);
dma               688 drivers/net/ethernet/amd/lance.c 				disable_dma(dma);
dma               690 drivers/net/ethernet/amd/lance.c 				free_dma(dma);
dma               736 drivers/net/ethernet/amd/lance.c 	if (dev->dma != 4)
dma               737 drivers/net/ethernet/amd/lance.c 		free_dma(dev->dma);
dma               767 drivers/net/ethernet/amd/lance.c 	if (dev->dma != 4) {
dma               769 drivers/net/ethernet/amd/lance.c 		enable_dma(dev->dma);
dma               770 drivers/net/ethernet/amd/lance.c 		set_dma_mode(dev->dma, DMA_MODE_CASCADE);
dma               787 drivers/net/ethernet/amd/lance.c 			   dev->name, dev->irq, dev->dma,
dma              1246 drivers/net/ethernet/amd/lance.c 	if (dev->dma != 4)
dma              1249 drivers/net/ethernet/amd/lance.c 		disable_dma(dev->dma);
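The 3c515.c and lance.c entries use a different ISA idiom: a bus-mastering card does not push data through the 8237, but its channel is still claimed and switched to DMA_MODE_CASCADE so the motherboard controller yields the bus to the card. A small sketch of that claim/release pair, with placeholder names, is below.

/*
 * Sketch of the cascade-mode idiom from the lance/3c515 entries above:
 * the channel is only reserved and put into DMA_MODE_CASCADE; no address
 * or count is programmed because the card masters the bus itself.
 */
#include <asm/dma.h>
#include <linux/errno.h>

static int foo_claim_busmaster_dma(int dma, const char *name)
{
	unsigned long flags;

	if (request_dma(dma, name))
		return -EBUSY;

	flags = claim_dma_lock();
	set_dma_mode(dma, DMA_MODE_CASCADE);
	enable_dma(dma);
	release_dma_lock(flags);

	return 0;
}

static void foo_release_busmaster_dma(int dma)
{
	unsigned long flags;

	flags = claim_dma_lock();
	disable_dma(dma);
	release_dma_lock(flags);

	free_dma(dma);
}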
dma                95 drivers/net/ethernet/amd/mvme147.c 	dev->dma = 0;
dma               347 drivers/net/ethernet/amd/ni65.c 	disable_dma(dev->dma);
dma               348 drivers/net/ethernet/amd/ni65.c 	free_dma(dev->dma);
dma               356 drivers/net/ethernet/amd/ni65.c static int dma;
dma               375 drivers/net/ethernet/amd/ni65.c 		dma = dev->dma;
dma               424 drivers/net/ethernet/amd/ni65.c 	dev->dma = dma;
dma               486 drivers/net/ethernet/amd/ni65.c 		dev->dma = dmatab[inw(ioaddr+L_CONFIG)&3];
dma               487 drivers/net/ethernet/amd/ni65.c 		printk("IRQ %d (from card), DMA %d (from card).\n",dev->irq,dev->dma);
dma               490 drivers/net/ethernet/amd/ni65.c 		if(dev->dma == 0) {
dma               496 drivers/net/ethernet/amd/ni65.c 				int dma = dmatab[i];
dma               497 drivers/net/ethernet/amd/ni65.c 				if(test_bit(dma,&dma_channels) || request_dma(dma,"ni6510"))
dma               501 drivers/net/ethernet/amd/ni65.c 				disable_dma(dma);
dma               502 drivers/net/ethernet/amd/ni65.c 				set_dma_mode(dma,DMA_MODE_CASCADE);
dma               503 drivers/net/ethernet/amd/ni65.c 				enable_dma(dma);
dma               509 drivers/net/ethernet/amd/ni65.c 				disable_dma(dma);
dma               510 drivers/net/ethernet/amd/ni65.c 				free_dma(dma);
dma               523 drivers/net/ethernet/amd/ni65.c 			dev->dma = dmatab[i];
dma               524 drivers/net/ethernet/amd/ni65.c 			printk("DMA %d (autodetected), ",dev->dma);
dma               527 drivers/net/ethernet/amd/ni65.c 			printk("DMA %d (assigned), ",dev->dma);
dma               551 drivers/net/ethernet/amd/ni65.c 	if(request_dma(dev->dma, cards[p->cardno].cardname ) != 0)
dma               553 drivers/net/ethernet/amd/ni65.c 		printk(KERN_ERR "%s: Can't request dma-channel %d\n",dev->name,(int) dev->dma);
dma               799 drivers/net/ethernet/amd/ni65.c 	 disable_dma(dev->dma); /* I've never worked with dma, but we do it like the packetdriver */
dma               800 drivers/net/ethernet/amd/ni65.c 	 set_dma_mode(dev->dma,DMA_MODE_CASCADE);
dma               801 drivers/net/ethernet/amd/ni65.c 	 enable_dma(dev->dma);
dma               810 drivers/net/ethernet/amd/ni65.c 		 disable_dma(dev->dma);
dma               863 drivers/net/ethernet/amd/ni65.c 	 disable_dma(dev->dma);
dma              1228 drivers/net/ethernet/amd/ni65.c module_param_hw(dma, int, dma, 0);
dma              1231 drivers/net/ethernet/amd/ni65.c MODULE_PARM_DESC(dma, "ni6510 ISA DMA channel (ignored for some cards)");
dma               200 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 				 dma_addr_t *dma, int node)
dma               206 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	mem = dma_alloc_coherent(dev, size, dma, GFP_KERNEL);
dma               210 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 		mem = dma_alloc_coherent(dev, size, dma, GFP_KERNEL);
dma                16 drivers/net/ethernet/apm/xgene/xgene_enet_hw.c 	u64 addr = ring->dma;
dma              1053 drivers/net/ethernet/apm/xgene/xgene_enet_main.c 	dmam_free_coherent(dev, ring->size, ring->desc_addr, ring->dma);
dma              1138 drivers/net/ethernet/apm/xgene/xgene_enet_main.c 		dmam_free_coherent(dev, ring->size, ring->desc_addr, ring->dma);
dma              1234 drivers/net/ethernet/apm/xgene/xgene_enet_main.c 	ring->desc_addr = dmam_alloc_coherent(dev, size, &ring->dma,
dma              1248 drivers/net/ethernet/apm/xgene/xgene_enet_main.c 					   ring->dma);
dma               105 drivers/net/ethernet/apm/xgene/xgene_enet_main.h 	dma_addr_t dma;
dma                15 drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c 	u64 addr = ring->dma;
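The xgene_enet entries use the device-managed variants dmam_alloc_coherent()/dmam_free_coherent(): the allocation is tied to the device via devres, so it is released automatically on unbind, and can still be returned early by hand. A minimal probe-time sketch, with a placeholder ring structure and size:

/*
 * Sketch of device-managed coherent allocation as used by xgene above.
 * The ring size and struct are placeholders; devres frees the memory when
 * the device is unbound unless dmam_free_coherent() is called first.
 */
#include <linux/dma-mapping.h>
#include <linux/platform_device.h>
#include <linux/sizes.h>

struct foo_ring {
	void		*desc_addr;
	dma_addr_t	dma;
	size_t		size;
};

static int foo_probe_ring(struct platform_device *pdev, struct foo_ring *ring)
{
	struct device *dev = &pdev->dev;

	ring->size = SZ_64K;
	ring->desc_addr = dmam_alloc_coherent(dev, ring->size, &ring->dma,
					      GFP_KERNEL);
	if (!ring->desc_addr)
		return -ENOMEM;

	/* program ring->dma into the hardware here */
	return 0;
}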
dma                91 drivers/net/ethernet/apple/mace.c static inline void dbdma_reset(volatile struct dbdma_regs __iomem *dma);
dma               302 drivers/net/ethernet/apple/mace.c static void dbdma_reset(volatile struct dbdma_regs __iomem *dma)
dma               306 drivers/net/ethernet/apple/mace.c     out_le32(&dma->control, (WAKE|FLUSH|PAUSE|RUN) << 16);
dma               313 drivers/net/ethernet/apple/mace.c 	if (le32_to_cpu(dma->control) & RUN)
dma                48 drivers/net/ethernet/atheros/alx/alx.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma               115 drivers/net/ethernet/atheros/alx/alx.h 		dma_addr_t dma;
dma                60 drivers/net/ethernet/atheros/alx/main.c 				 dma_unmap_addr(txb, dma),
dma                77 drivers/net/ethernet/atheros/alx/main.c 	dma_addr_t dma;
dma               104 drivers/net/ethernet/atheros/alx/main.c 		dma = dma_map_single(&alx->hw.pdev->dev,
dma               107 drivers/net/ethernet/atheros/alx/main.c 		if (dma_mapping_error(&alx->hw.pdev->dev, dma)) {
dma               115 drivers/net/ethernet/atheros/alx/main.c 		if (WARN_ON(dma & 3)) {
dma               122 drivers/net/ethernet/atheros/alx/main.c 		dma_unmap_addr_set(cur_buf, dma, dma);
dma               123 drivers/net/ethernet/atheros/alx/main.c 		rfd->addr = cpu_to_le64(dma);
dma               244 drivers/net/ethernet/atheros/alx/main.c 				 dma_unmap_addr(rxb, dma),
dma               454 drivers/net/ethernet/atheros/alx/main.c 	u32 addr_hi = ((u64)alx->descmem.dma) >> 32;
dma               519 drivers/net/ethernet/atheros/alx/main.c 					 dma_unmap_addr(cur_buf, dma),
dma               525 drivers/net/ethernet/atheros/alx/main.c 			dma_unmap_addr_set(cur_buf, dma, 0);
dma               625 drivers/net/ethernet/atheros/alx/main.c 	txq->tpd_dma = alx->descmem.dma + offset;
dma               639 drivers/net/ethernet/atheros/alx/main.c 	rxq->rrd_dma = alx->descmem.dma + offset;
dma               643 drivers/net/ethernet/atheros/alx/main.c 	rxq->rfd_dma = alx->descmem.dma + offset;
dma               665 drivers/net/ethernet/atheros/alx/main.c 					       &alx->descmem.dma, GFP_KERNEL);
dma               707 drivers/net/ethernet/atheros/alx/main.c 				  alx->descmem.dma);
dma              1439 drivers/net/ethernet/atheros/alx/main.c 	dma_addr_t dma;
dma              1456 drivers/net/ethernet/atheros/alx/main.c 	dma = dma_map_single(txq->dev, skb->data, maplen,
dma              1458 drivers/net/ethernet/atheros/alx/main.c 	if (dma_mapping_error(txq->dev, dma))
dma              1462 drivers/net/ethernet/atheros/alx/main.c 	dma_unmap_addr_set(&txq->bufs[txq->write_idx], dma, dma);
dma              1464 drivers/net/ethernet/atheros/alx/main.c 	tpd->adrl.addr = cpu_to_le64(dma);
dma              1477 drivers/net/ethernet/atheros/alx/main.c 		dma = skb_frag_dma_map(txq->dev, frag, 0,
dma              1479 drivers/net/ethernet/atheros/alx/main.c 		if (dma_mapping_error(txq->dev, dma))
dma              1482 drivers/net/ethernet/atheros/alx/main.c 		dma_unmap_addr_set(&txq->bufs[txq->write_idx], dma, dma);
dma              1484 drivers/net/ethernet/atheros/alx/main.c 		tpd->adrl.addr = cpu_to_le64(dma);
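The alx entries show DEFINE_DMA_UNMAP_ADDR()/dma_unmap_addr_set()/dma_unmap_addr(), which stash the streaming DMA address in per-buffer bookkeeping only when the platform actually needs it to unmap (the fields compile away otherwise). A sketch of that bookkeeping with hypothetical names:

/*
 * Sketch of the dma_unmap_addr bookkeeping seen in the alx entries above.
 * DEFINE_DMA_UNMAP_ADDR/LEN occupy space only when the architecture needs
 * the saved values; dma_unmap_addr()/dma_unmap_len() read them back.
 */
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

struct foo_buffer {
	struct sk_buff *skb;
	DEFINE_DMA_UNMAP_ADDR(dma);
	DEFINE_DMA_UNMAP_LEN(len);
};

static int foo_map_buffer(struct device *dev, struct foo_buffer *buf,
			  struct sk_buff *skb)
{
	dma_addr_t dma = dma_map_single(dev, skb->data, skb_headlen(skb),
					DMA_TO_DEVICE);

	if (dma_mapping_error(dev, dma))
		return -ENOMEM;

	buf->skb = skb;
	dma_unmap_addr_set(buf, dma, dma);
	dma_unmap_len_set(buf, len, skb_headlen(skb));
	return 0;
}

static void foo_unmap_buffer(struct device *dev, struct foo_buffer *buf)
{
	dma_unmap_single(dev, dma_unmap_addr(buf, dma),
			 dma_unmap_len(buf, len), DMA_TO_DEVICE);
	dma_unmap_addr_set(buf, dma, 0);
}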
dma               434 drivers/net/ethernet/atheros/atl1c/atl1c.h 	dma_addr_t dma;		/* physical address*/
dma               457 drivers/net/ethernet/atheros/atl1c/atl1c.h 	dma_addr_t dma;
dma               475 drivers/net/ethernet/atheros/atl1c/atl1c.h 	dma_addr_t dma;		/* descriptor ring physical address */
dma               486 drivers/net/ethernet/atheros/atl1c/atl1c.h 	dma_addr_t dma;		/* descriptor ring physical address */
dma               497 drivers/net/ethernet/atheros/atl1c/atl1c.h 	dma_addr_t dma;		/* descriptor ring physical address */
dma               830 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	if (buffer_info->dma) {
dma               837 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			pci_unmap_single(pdev, buffer_info->dma,
dma               840 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			pci_unmap_page(pdev, buffer_info->dma,
dma               845 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	buffer_info->dma = 0;
dma               941 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 					adapter->ring_header.dma);
dma              1010 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 					       &ring_header->dma, GFP_KERNEL);
dma              1017 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	tpd_ring[0].dma = roundup(ring_header->dma, 8);
dma              1018 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	offset = tpd_ring[0].dma - ring_header->dma;
dma              1020 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		tpd_ring[i].dma = ring_header->dma + offset;
dma              1027 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	rfd_ring->dma = ring_header->dma + offset;
dma              1033 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	rrd_ring->dma = ring_header->dma + offset;
dma              1056 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			(u32)((tpd_ring[atl1c_trans_normal].dma &
dma              1060 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			(u32)(tpd_ring[atl1c_trans_normal].dma &
dma              1063 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			(u32)(tpd_ring[atl1c_trans_high].dma &
dma              1071 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			(u32)((rfd_ring->dma & AT_DMA_HI_ADDR_MASK) >> 32));
dma              1073 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			(u32)(rfd_ring->dma & AT_DMA_LO_ADDR_MASK));
dma              1082 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			(u32)(rrd_ring->dma & AT_DMA_LO_ADDR_MASK));
dma              1734 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		buffer_info->dma = mapping;
dma              1737 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		rfd_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              1837 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 			pci_unmap_single(pdev, buffer_info->dma,
dma              2114 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		buffer_info->dma = pci_map_single(adapter->pdev,
dma              2117 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 						   buffer_info->dma)))
dma              2123 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		use_tpd->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              2138 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		buffer_info->dma =
dma              2142 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 						   buffer_info->dma)))
dma              2148 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		use_tpd->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              2160 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		buffer_info->dma = skb_frag_dma_map(&adapter->pdev->dev,
dma              2164 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		if (dma_mapping_error(&adapter->pdev->dev, buffer_info->dma))
dma              2170 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 		use_tpd->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              2183 drivers/net/ethernet/atheros/atl1c/atl1c_main.c 	buffer_info->dma = 0;
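The atl1c_main.c entries allocate one dma_alloc_coherent() region ("ring_header") and carve the TPD, RFD and RRD rings out of it, rounding each sub-ring's DMA address up to an 8-byte boundary and applying the same offset to the CPU pointer. A compressed two-ring sketch of that carving, with placeholder names:

/*
 * Sketch of the "one coherent block, several rings" carving from the
 * atl1c entries above.  Two rings and 8-byte alignment are assumed; the
 * offset computed on the DMA side is reused for the kernel-virtual side
 * so both views stay consistent.
 */
#include <linux/dma-mapping.h>
#include <linux/kernel.h>

struct foo_sub_ring {
	void		*desc;
	dma_addr_t	dma;
	size_t		size;
};

struct foo_rings {
	void		*base;
	dma_addr_t	base_dma;
	size_t		total;
	struct foo_sub_ring	tx, rx;
};

static int foo_alloc_rings(struct device *dev, struct foo_rings *r,
			   size_t tx_size, size_t rx_size)
{
	size_t offset;

	r->total = tx_size + rx_size + 2 * 8;	/* slack for alignment */
	r->base = dma_alloc_coherent(dev, r->total, &r->base_dma, GFP_KERNEL);
	if (!r->base)
		return -ENOMEM;

	r->tx.dma = roundup(r->base_dma, 8);
	offset = r->tx.dma - r->base_dma;
	r->tx.desc = r->base + offset;
	r->tx.size = tx_size;

	r->rx.dma = roundup(r->tx.dma + tx_size, 8);
	offset = r->rx.dma - r->base_dma;
	r->rx.desc = r->base + offset;
	r->rx.size = rx_size;

	return 0;
}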
dma               371 drivers/net/ethernet/atheros/atl1e/atl1e.h 	dma_addr_t dma;
dma               380 drivers/net/ethernet/atheros/atl1e/atl1e.h 	dma_addr_t	dma;    /* receive rage DMA address */
dma               398 drivers/net/ethernet/atheros/atl1e/atl1e.h 	dma_addr_t	   dma;    /* descriptor ring physical address */
dma               411 drivers/net/ethernet/atheros/atl1e/atl1e.h 	dma_addr_t  	dma;
dma               659 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		if (tx_buffer->dma) {
dma               661 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 				pci_unmap_single(pdev, tx_buffer->dma,
dma               664 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 				pci_unmap_page(pdev, tx_buffer->dma,
dma               666 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			tx_buffer->dma = 0;
dma               828 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 	tx_ring->dma = roundup(adapter->ring_dma, 8);
dma               829 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 	offset = tx_ring->dma - adapter->ring_dma;
dma               844 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			rx_page_desc[i].rx_page[j].dma =
dma               896 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			(u32)((tx_ring->dma) & AT_DMA_LO_ADDR_MASK));
dma               911 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			page_phy_addr = rx_page_desc[i].rx_page[j].dma;
dma              1238 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		if (tx_buffer->dma) {
dma              1240 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 				pci_unmap_single(adapter->pdev, tx_buffer->dma,
dma              1243 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 				pci_unmap_page(adapter->pdev, tx_buffer->dma,
dma              1245 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			tx_buffer->dma = 0;
dma              1717 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		tx_buffer->dma = pci_map_single(adapter->pdev,
dma              1719 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		if (dma_mapping_error(&adapter->pdev->dev, tx_buffer->dma))
dma              1724 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		use_tpd->buffer_addr = cpu_to_le64(tx_buffer->dma);
dma              1745 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		tx_buffer->dma =
dma              1749 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		if (dma_mapping_error(&adapter->pdev->dev, tx_buffer->dma)) {
dma              1756 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 				pci_unmap_single(adapter->pdev, tx_buffer->dma,
dma              1766 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 		use_tpd->buffer_addr = cpu_to_le64(tx_buffer->dma);
dma              1793 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			tx_buffer->dma = skb_frag_dma_map(&adapter->pdev->dev,
dma              1799 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			if (dma_mapping_error(&adapter->pdev->dev, tx_buffer->dma)) {
dma              1806 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 					dma_unmap_page(&adapter->pdev->dev, tx_buffer->dma,
dma              1816 drivers/net/ethernet/atheros/atl1e/atl1e_main.c 			use_tpd->buffer_addr = cpu_to_le64(tx_buffer->dma);
dma              1056 drivers/net/ethernet/atheros/atlx/atl1.c 		&ring_header->dma);
dma              1064 drivers/net/ethernet/atheros/atlx/atl1.c 	tpd_ring->dma = ring_header->dma;
dma              1065 drivers/net/ethernet/atheros/atlx/atl1.c 	offset = (tpd_ring->dma & 0x7) ? (8 - (ring_header->dma & 0x7)) : 0;
dma              1066 drivers/net/ethernet/atheros/atlx/atl1.c 	tpd_ring->dma += offset;
dma              1071 drivers/net/ethernet/atheros/atlx/atl1.c 	rfd_ring->dma = tpd_ring->dma + tpd_ring->size;
dma              1072 drivers/net/ethernet/atheros/atlx/atl1.c 	offset = (rfd_ring->dma & 0x7) ? (8 - (rfd_ring->dma & 0x7)) : 0;
dma              1073 drivers/net/ethernet/atheros/atlx/atl1.c 	rfd_ring->dma += offset;
dma              1079 drivers/net/ethernet/atheros/atlx/atl1.c 	rrd_ring->dma = rfd_ring->dma + rfd_ring->size;
dma              1080 drivers/net/ethernet/atheros/atlx/atl1.c 	offset = (rrd_ring->dma & 0x7) ? (8 - (rrd_ring->dma & 0x7)) : 0;
dma              1081 drivers/net/ethernet/atheros/atlx/atl1.c 	rrd_ring->dma += offset;
dma              1087 drivers/net/ethernet/atheros/atlx/atl1.c 	adapter->cmb.dma = rrd_ring->dma + rrd_ring->size;
dma              1088 drivers/net/ethernet/atheros/atlx/atl1.c 	offset = (adapter->cmb.dma & 0x7) ? (8 - (adapter->cmb.dma & 0x7)) : 0;
dma              1089 drivers/net/ethernet/atheros/atlx/atl1.c 	adapter->cmb.dma += offset;
dma              1094 drivers/net/ethernet/atheros/atlx/atl1.c 	adapter->smb.dma = adapter->cmb.dma + sizeof(struct coals_msg_block);
dma              1095 drivers/net/ethernet/atheros/atlx/atl1.c 	offset = (adapter->smb.dma & 0x7) ? (8 - (adapter->smb.dma & 0x7)) : 0;
dma              1096 drivers/net/ethernet/atheros/atlx/atl1.c 	adapter->smb.dma += offset;
dma              1140 drivers/net/ethernet/atheros/atlx/atl1.c 		if (buffer_info->dma) {
dma              1141 drivers/net/ethernet/atheros/atlx/atl1.c 			pci_unmap_page(pdev, buffer_info->dma,
dma              1143 drivers/net/ethernet/atheros/atlx/atl1.c 			buffer_info->dma = 0;
dma              1179 drivers/net/ethernet/atheros/atlx/atl1.c 		if (buffer_info->dma) {
dma              1180 drivers/net/ethernet/atheros/atlx/atl1.c 			pci_unmap_page(pdev, buffer_info->dma,
dma              1182 drivers/net/ethernet/atheros/atlx/atl1.c 			buffer_info->dma = 0;
dma              1223 drivers/net/ethernet/atheros/atlx/atl1.c 		ring_header->dma);
dma              1227 drivers/net/ethernet/atheros/atlx/atl1.c 	tpd_ring->dma = 0;
dma              1231 drivers/net/ethernet/atheros/atlx/atl1.c 	rfd_ring->dma = 0;
dma              1234 drivers/net/ethernet/atheros/atlx/atl1.c 	rrd_ring->dma = 0;
dma              1236 drivers/net/ethernet/atheros/atlx/atl1.c 	adapter->cmb.dma = 0;
dma              1239 drivers/net/ethernet/atheros/atlx/atl1.c 	adapter->smb.dma = 0;
dma              1473 drivers/net/ethernet/atheros/atlx/atl1.c 	iowrite32((u32) ((adapter->tpd_ring.dma & 0xffffffff00000000ULL) >> 32),
dma              1476 drivers/net/ethernet/atheros/atlx/atl1.c 	iowrite32((u32) (adapter->rfd_ring.dma & 0x00000000ffffffffULL),
dma              1478 drivers/net/ethernet/atheros/atlx/atl1.c 	iowrite32((u32) (adapter->rrd_ring.dma & 0x00000000ffffffffULL),
dma              1480 drivers/net/ethernet/atheros/atlx/atl1.c 	iowrite32((u32) (adapter->tpd_ring.dma & 0x00000000ffffffffULL),
dma              1482 drivers/net/ethernet/atheros/atlx/atl1.c 	iowrite32((u32) (adapter->cmb.dma & 0x00000000ffffffffULL),
dma              1484 drivers/net/ethernet/atheros/atlx/atl1.c 	iowrite32((u32) (adapter->smb.dma & 0x00000000ffffffffULL),
dma              1871 drivers/net/ethernet/atheros/atlx/atl1.c 		buffer_info->dma = pci_map_page(pdev, page, offset,
dma              1874 drivers/net/ethernet/atheros/atlx/atl1.c 		rfd_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              1997 drivers/net/ethernet/atheros/atlx/atl1.c 		pci_unmap_page(adapter->pdev, buffer_info->dma,
dma              1999 drivers/net/ethernet/atheros/atlx/atl1.c 		buffer_info->dma = 0;
dma              2066 drivers/net/ethernet/atheros/atlx/atl1.c 		if (buffer_info->dma) {
dma              2067 drivers/net/ethernet/atheros/atlx/atl1.c 			pci_unmap_page(adapter->pdev, buffer_info->dma,
dma              2069 drivers/net/ethernet/atheros/atlx/atl1.c 			buffer_info->dma = 0;
dma              2215 drivers/net/ethernet/atheros/atlx/atl1.c 		buffer_info->dma = pci_map_page(adapter->pdev, page,
dma              2240 drivers/net/ethernet/atheros/atlx/atl1.c 				buffer_info->dma = pci_map_page(adapter->pdev,
dma              2252 drivers/net/ethernet/atheros/atlx/atl1.c 		buffer_info->dma = pci_map_page(adapter->pdev, page,
dma              2274 drivers/net/ethernet/atheros/atlx/atl1.c 			buffer_info->dma = skb_frag_dma_map(&adapter->pdev->dev,
dma              2302 drivers/net/ethernet/atheros/atlx/atl1.c 		tpd->buffer_addr = cpu_to_le64(buffer_info->dma);
dma               585 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;		/* physical address*/
dma               597 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;
dma               603 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;		/* descriptor ring physical address */
dma               615 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;		/* descriptor ring physical address */
dma               626 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;		/* descriptor ring physical address */
dma               636 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;
dma               642 drivers/net/ethernet/atheros/atlx/atl1.h 	dma_addr_t dma;
dma               429 drivers/net/ethernet/atheros/atlx/atl2.h     dma_addr_t dma;
dma               228 drivers/net/ethernet/aurora/nb8800.c 	dma_addr_t dma = rxd->desc.s_addr;
dma               243 drivers/net/ethernet/aurora/nb8800.c 		dma_sync_single_for_cpu(&dev->dev, dma, len, DMA_FROM_DEVICE);
dma               245 drivers/net/ethernet/aurora/nb8800.c 		dma_sync_single_for_device(&dev->dev, dma, len,
dma               256 drivers/net/ethernet/aurora/nb8800.c 		dma_unmap_page(&dev->dev, dma, RX_BUF_SIZE, DMA_FROM_DEVICE);
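The nb8800.c entries appear to keep an RX page mapped across packets: small frames are copied out between dma_sync_single_for_cpu() and dma_sync_single_for_device(), and the page is only dma_unmap_page()'d when it is handed up the stack. A sketch of that ownership dance; the copybreak threshold, buffer struct and skb handling are placeholders.

/*
 * Sketch of the sync-and-recycle pattern from the nb8800 entries above.
 * Rule being illustrated: sync to CPU before reading, sync back to the
 * device before reuse, unmap only when the buffer leaves the ring.
 */
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/skbuff.h>

#define FOO_COPYBREAK	256
#define FOO_BUF_SIZE	2048

struct foo_rx_buf {
	struct page	*page;
	dma_addr_t	dma;
	unsigned int	offset;
};

static struct sk_buff *foo_rx_frame(struct device *dev, struct foo_rx_buf *buf,
				    unsigned int len)
{
	struct sk_buff *skb = alloc_skb(FOO_COPYBREAK, GFP_ATOMIC);

	if (!skb)
		return NULL;

	if (len <= FOO_COPYBREAK) {
		/* Small frame: copy out, keep the page mapped for reuse. */
		dma_sync_single_for_cpu(dev, buf->dma, len, DMA_FROM_DEVICE);
		skb_put_data(skb, page_address(buf->page) + buf->offset, len);
		dma_sync_single_for_device(dev, buf->dma, len, DMA_FROM_DEVICE);
	} else {
		/* Large frame: hand the page to the stack and unmap it. */
		dma_unmap_page(dev, buf->dma, FOO_BUF_SIZE, DMA_FROM_DEVICE);
		skb_add_rx_frag(skb, 0, buf->page, buf->offset, len,
				FOO_BUF_SIZE);
		buf->page = NULL;	/* caller must refill this slot */
	}

	return skb;
}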
dma              5216 drivers/net/ethernet/broadcom/bnx2.c bnx2_init_rxbd_rings(struct bnx2_rx_bd *rx_ring[], dma_addr_t dma[],
dma              5234 drivers/net/ethernet/broadcom/bnx2.c 		rxbd->rx_bd_haddr_hi = (u64) dma[j] >> 32;
dma              5235 drivers/net/ethernet/broadcom/bnx2.c 		rxbd->rx_bd_haddr_lo = (u64) dma[j] & 0xffffffff;
dma               732 drivers/net/ethernet/broadcom/cnic.c static void cnic_free_dma(struct cnic_dev *dev, struct cnic_dma *dma)
dma               736 drivers/net/ethernet/broadcom/cnic.c 	if (!dma->pg_arr)
dma               739 drivers/net/ethernet/broadcom/cnic.c 	for (i = 0; i < dma->num_pages; i++) {
dma               740 drivers/net/ethernet/broadcom/cnic.c 		if (dma->pg_arr[i]) {
dma               742 drivers/net/ethernet/broadcom/cnic.c 					  dma->pg_arr[i], dma->pg_map_arr[i]);
dma               743 drivers/net/ethernet/broadcom/cnic.c 			dma->pg_arr[i] = NULL;
dma               746 drivers/net/ethernet/broadcom/cnic.c 	if (dma->pgtbl) {
dma               747 drivers/net/ethernet/broadcom/cnic.c 		dma_free_coherent(&dev->pcidev->dev, dma->pgtbl_size,
dma               748 drivers/net/ethernet/broadcom/cnic.c 				  dma->pgtbl, dma->pgtbl_map);
dma               749 drivers/net/ethernet/broadcom/cnic.c 		dma->pgtbl = NULL;
dma               751 drivers/net/ethernet/broadcom/cnic.c 	kfree(dma->pg_arr);
dma               752 drivers/net/ethernet/broadcom/cnic.c 	dma->pg_arr = NULL;
dma               753 drivers/net/ethernet/broadcom/cnic.c 	dma->num_pages = 0;
dma               756 drivers/net/ethernet/broadcom/cnic.c static void cnic_setup_page_tbl(struct cnic_dev *dev, struct cnic_dma *dma)
dma               759 drivers/net/ethernet/broadcom/cnic.c 	__le32 *page_table = (__le32 *) dma->pgtbl;
dma               761 drivers/net/ethernet/broadcom/cnic.c 	for (i = 0; i < dma->num_pages; i++) {
dma               763 drivers/net/ethernet/broadcom/cnic.c 		*page_table = cpu_to_le32((u64) dma->pg_map_arr[i] >> 32);
dma               765 drivers/net/ethernet/broadcom/cnic.c 		*page_table = cpu_to_le32(dma->pg_map_arr[i] & 0xffffffff);
dma               770 drivers/net/ethernet/broadcom/cnic.c static void cnic_setup_page_tbl_le(struct cnic_dev *dev, struct cnic_dma *dma)
dma               773 drivers/net/ethernet/broadcom/cnic.c 	__le32 *page_table = (__le32 *) dma->pgtbl;
dma               775 drivers/net/ethernet/broadcom/cnic.c 	for (i = 0; i < dma->num_pages; i++) {
dma               777 drivers/net/ethernet/broadcom/cnic.c 		*page_table = cpu_to_le32(dma->pg_map_arr[i] & 0xffffffff);
dma               779 drivers/net/ethernet/broadcom/cnic.c 		*page_table = cpu_to_le32((u64) dma->pg_map_arr[i] >> 32);
dma               784 drivers/net/ethernet/broadcom/cnic.c static int cnic_alloc_dma(struct cnic_dev *dev, struct cnic_dma *dma,
dma               791 drivers/net/ethernet/broadcom/cnic.c 	dma->pg_arr = kzalloc(size, GFP_ATOMIC);
dma               792 drivers/net/ethernet/broadcom/cnic.c 	if (dma->pg_arr == NULL)
dma               795 drivers/net/ethernet/broadcom/cnic.c 	dma->pg_map_arr = (dma_addr_t *) (dma->pg_arr + pages);
dma               796 drivers/net/ethernet/broadcom/cnic.c 	dma->num_pages = pages;
dma               799 drivers/net/ethernet/broadcom/cnic.c 		dma->pg_arr[i] = dma_alloc_coherent(&dev->pcidev->dev,
dma               801 drivers/net/ethernet/broadcom/cnic.c 						    &dma->pg_map_arr[i],
dma               803 drivers/net/ethernet/broadcom/cnic.c 		if (dma->pg_arr[i] == NULL)
dma               809 drivers/net/ethernet/broadcom/cnic.c 	dma->pgtbl_size = ((pages * 8) + CNIC_PAGE_SIZE - 1) &
dma               811 drivers/net/ethernet/broadcom/cnic.c 	dma->pgtbl = dma_alloc_coherent(&dev->pcidev->dev, dma->pgtbl_size,
dma               812 drivers/net/ethernet/broadcom/cnic.c 					&dma->pgtbl_map, GFP_ATOMIC);
dma               813 drivers/net/ethernet/broadcom/cnic.c 	if (dma->pgtbl == NULL)
dma               816 drivers/net/ethernet/broadcom/cnic.c 	cp->setup_pgtbl(dev, dma);
dma               821 drivers/net/ethernet/broadcom/cnic.c 	cnic_free_dma(dev, dma);
dma               897 drivers/net/ethernet/broadcom/cnic.c 	cnic_free_dma(dev, &cp->kcq2.dma);
dma               898 drivers/net/ethernet/broadcom/cnic.c 	cnic_free_dma(dev, &cp->kcq1.dma);
dma               993 drivers/net/ethernet/broadcom/cnic.c 	err = cnic_alloc_dma(dev, &info->dma, KCQ_PAGE_CNT, use_page_tbl);
dma               997 drivers/net/ethernet/broadcom/cnic.c 	kcq = (struct kcqe **) info->dma.pg_arr;
dma              1015 drivers/net/ethernet/broadcom/cnic.c 		next->addr_hi = (u64) info->dma.pg_map_arr[j] >> 32;
dma              1016 drivers/net/ethernet/broadcom/cnic.c 		next->addr_lo = info->dma.pg_map_arr[j] & 0xffffffff;
dma              2321 drivers/net/ethernet/broadcom/cnic.c 	fcoe_init->eq_pbl_base.lo = cp->kcq2.dma.pgtbl_map & 0xffffffff;
dma              2322 drivers/net/ethernet/broadcom/cnic.c 	fcoe_init->eq_pbl_base.hi = (u64) cp->kcq2.dma.pgtbl_map >> 32;
dma              2323 drivers/net/ethernet/broadcom/cnic.c 	fcoe_init->eq_pbl_size = cp->kcq2.dma.num_pages;
dma              4789 drivers/net/ethernet/broadcom/cnic.c 	val = (u32) ((u64) cp->kcq1.dma.pgtbl_map >> 32);
dma              4792 drivers/net/ethernet/broadcom/cnic.c 	val = (u32) cp->kcq1.dma.pgtbl_map;
dma              5127 drivers/net/ethernet/broadcom/cnic.c 		cp->kcq1.dma.pg_map_arr[1] & 0xffffffff);
dma              5130 drivers/net/ethernet/broadcom/cnic.c 		(u64) cp->kcq1.dma.pg_map_arr[1] >> 32);
dma              5133 drivers/net/ethernet/broadcom/cnic.c 		cp->kcq1.dma.pg_map_arr[0] & 0xffffffff);
dma              5136 drivers/net/ethernet/broadcom/cnic.c 		(u64) cp->kcq1.dma.pg_map_arr[0] >> 32);
dma               178 drivers/net/ethernet/broadcom/cnic.h 	struct cnic_dma	dma;
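The cnic.c entries build a firmware-visible page table: an array of pages allocated with dma_alloc_coherent() plus a table whose entries are the upper and lower 32 bits of each page's DMA address as little-endian words (cnic_setup_page_tbl() stores the high word first, the _le variant the low word first). A sketch of filling such a table, with placeholder names, mirroring the high-word-first layout:

/*
 * Sketch of the hi/lo page-table fill from the cnic entries above.
 * Layout (high 32 bits, then low 32 bits, both little-endian) follows
 * cnic_setup_page_tbl(); the structure is a placeholder.
 */
#include <linux/dma-mapping.h>
#include <linux/kernel.h>
#include <linux/types.h>

struct foo_dma {
	int		num_pages;
	void		**pg_arr;	/* CPU addresses of the data pages */
	dma_addr_t	*pg_map_arr;	/* DMA addresses of the data pages */
	__le32		*pgtbl;		/* table the firmware walks */
};

static void foo_setup_page_tbl(struct foo_dma *dma)
{
	__le32 *entry = dma->pgtbl;
	int i;

	for (i = 0; i < dma->num_pages; i++) {
		/* High word first, then low word, as the firmware expects. */
		*entry++ = cpu_to_le32(upper_32_bits(dma->pg_map_arr[i]));
		*entry++ = cpu_to_le32(lower_32_bits(dma->pg_map_arr[i]));
	}
}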
dma              1691 drivers/net/ethernet/brocade/bna/bna_enet.c 	u64 dma;
dma              1705 drivers/net/ethernet/brocade/bna/bna_enet.c 		&res_info[BNA_RES_MEM_T_ATTR].res_u.mem_info.mdl[0].dma, dma);
dma              1707 drivers/net/ethernet/brocade/bna/bna_enet.c 	bfa_nw_ioc_mem_claim(&ioceth->ioc, kva, dma);
dma              1717 drivers/net/ethernet/brocade/bna/bna_enet.c 		&res_info[BNA_RES_MEM_T_COM].res_u.mem_info.mdl[0].dma, dma);
dma              1720 drivers/net/ethernet/brocade/bna/bna_enet.c 	bfa_nw_cee_mem_claim(&bna->cee, kva, dma);
dma              1722 drivers/net/ethernet/brocade/bna/bna_enet.c 	dma += bfa_nw_cee_meminfo();
dma              1725 drivers/net/ethernet/brocade/bna/bna_enet.c 	bfa_nw_flash_memclaim(&bna->flash, kva, dma);
dma              1727 drivers/net/ethernet/brocade/bna/bna_enet.c 	dma += bfa_nw_flash_meminfo();
dma              1730 drivers/net/ethernet/brocade/bna/bna_enet.c 	bfa_msgq_memclaim(&bna->msgq, kva, dma);
dma              1733 drivers/net/ethernet/brocade/bna/bna_enet.c 	dma += bfa_msgq_meminfo();
dma              1975 drivers/net/ethernet/brocade/bna/bna_enet.c 		res_info[BNA_RES_MEM_T_STATS].res_u.mem_info.mdl[0].dma.msb;
dma              1977 drivers/net/ethernet/brocade/bna/bna_enet.c 		res_info[BNA_RES_MEM_T_STATS].res_u.mem_info.mdl[0].dma.lsb;
dma              1862 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	u64 dma;
dma              1866 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	rxq->qpt.hw_qpt_ptr.lsb = qpt_mem->dma.lsb;
dma              1867 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	rxq->qpt.hw_qpt_ptr.msb = qpt_mem->dma.msb;
dma              1876 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	BNA_GET_DMA_ADDR(&page_mem->dma, dma);
dma              1882 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		BNA_SET_DMA_ADDR(dma, &bna_dma);
dma              1887 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		dma += PAGE_SIZE;
dma              1900 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	u64 dma;
dma              1904 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	rxp->cq.qpt.hw_qpt_ptr.lsb = qpt_mem->dma.lsb;
dma              1905 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	rxp->cq.qpt.hw_qpt_ptr.msb = qpt_mem->dma.msb;
dma              1914 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	BNA_GET_DMA_ADDR(&page_mem->dma, dma);
dma              1920 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		BNA_SET_DMA_ADDR(dma, &bna_dma);
dma              1925 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		dma += PAGE_SIZE;
dma              2361 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		res_info[BNA_RX_RES_MEM_T_IBIDX].res_u.mem_info.mdl[i].dma.lsb;
dma              2363 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		res_info[BNA_RX_RES_MEM_T_IBIDX].res_u.mem_info.mdl[i].dma.msb;
dma              3167 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	u64 dma;
dma              3171 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	txq->qpt.hw_qpt_ptr.lsb = qpt_mem->dma.lsb;
dma              3172 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	txq->qpt.hw_qpt_ptr.msb = qpt_mem->dma.msb;
dma              3181 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	BNA_GET_DMA_ADDR(&page_mem->dma, dma);
dma              3187 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		BNA_SET_DMA_ADDR(dma, &bna_dma);
dma              3192 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		dma += PAGE_SIZE;
dma              3449 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		res_info[BNA_TX_RES_MEM_T_IBIDX].res_u.mem_info.mdl[i].dma.lsb;
dma              3451 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		res_info[BNA_TX_RES_MEM_T_IBIDX].res_u.mem_info.mdl[i].dma.msb;
dma               264 drivers/net/ethernet/brocade/bna/bna_types.h 	struct bna_dma_addr dma;
dma              1323 drivers/net/ethernet/brocade/bna/bnad.c 				BNA_GET_DMA_ADDR(&(mem_info->mdl[i].dma),
dma              1364 drivers/net/ethernet/brocade/bna/bnad.c 					 &(mem_info->mdl[i].dma));
dma               139 drivers/net/ethernet/cavium/liquidio/octeon_config.h #define CFG_GET_DMA_INTR_PKT(cfg)                ((cfg)->dma.dma_intr_pkt)
dma               140 drivers/net/ethernet/cavium/liquidio/octeon_config.h #define CFG_GET_DMA_INTR_TIME(cfg)               ((cfg)->dma.dma_intr_time)
dma               151 drivers/net/ethernet/cavium/liquidio/octeon_droq.c 		if (pg_info->dma)
dma               153 drivers/net/ethernet/cavium/liquidio/octeon_droq.c 				       (u64)pg_info->dma);
dma               154 drivers/net/ethernet/cavium/liquidio/octeon_droq.c 		pg_info->dma = 0;
dma               367 drivers/net/ethernet/cavium/liquidio/octeon_droq.c 				       (u64)pg_info->dma);
dma               369 drivers/net/ethernet/cavium/liquidio/octeon_droq.c 			pg_info->dma = 0;
dma                65 drivers/net/ethernet/cavium/liquidio/octeon_droq.h 	dma_addr_t dma;
dma               286 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	pg_info->dma = dma_map_page(&oct->pci_dev->dev, page, 0,
dma               290 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	if (dma_mapping_error(&oct->pci_dev->dev, pg_info->dma)) {
dma               301 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	skb_pg_info->dma = pg_info->dma;
dma               325 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	skb_pg_info->dma = 0;
dma               344 drivers/net/ethernet/cavium/liquidio/octeon_network.h 			       pg_info->dma, (PAGE_SIZE << 0),
dma               346 drivers/net/ethernet/cavium/liquidio/octeon_network.h 		pg_info->dma = 0;
dma               371 drivers/net/ethernet/cavium/liquidio/octeon_network.h 			       pg_info->dma, (PAGE_SIZE << 0),
dma               385 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	skb_pg_info->dma = pg_info->dma;
dma               396 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	pg_info->dma = 0;
dma               413 drivers/net/ethernet/cavium/liquidio/octeon_network.h 		pg_info->dma = 0;
dma               464 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	dma_addr = pg_info->dma;
dma               465 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	if (!pg_info->dma) {
dma                60 drivers/net/ethernet/cavium/thunder/nicvf_queues.c 						&dmem->dma, GFP_KERNEL);
dma                65 drivers/net/ethernet/cavium/thunder/nicvf_queues.c 	dmem->phys_base = NICVF_ALIGNED_ADDR((u64)dmem->dma, align_bytes);
dma                66 drivers/net/ethernet/cavium/thunder/nicvf_queues.c 	dmem->base = dmem->unalign_base + (dmem->phys_base - dmem->dma);
dma                77 drivers/net/ethernet/cavium/thunder/nicvf_queues.c 			  dmem->unalign_base, dmem->dma);
dma               207 drivers/net/ethernet/cavium/thunder/nicvf_queues.h 	dma_addr_t	dma;
dma               153 drivers/net/ethernet/cirrus/cs89x0.c 	int dma;		/* DMA channel */
dma               341 drivers/net/ethernet/cirrus/cs89x0.c 	if (lp->dma) {
dma               342 drivers/net/ethernet/cirrus/cs89x0.c 		dev->dma = lp->dma;
dma               347 drivers/net/ethernet/cirrus/cs89x0.c 		dev->dma = lp->isa_config & DMA_NO_MASK;
dma               349 drivers/net/ethernet/cirrus/cs89x0.c 			dev->dma += 5;
dma               350 drivers/net/ethernet/cirrus/cs89x0.c 		if (dev->dma < 5 || dev->dma > 7) {
dma               358 drivers/net/ethernet/cirrus/cs89x0.c write_dma(struct net_device *dev, int chip_type, int dma)
dma               364 drivers/net/ethernet/cirrus/cs89x0.c 		writereg(dev, PP_CS8900_ISADMA, dma - 5);
dma               366 drivers/net/ethernet/cirrus/cs89x0.c 		writereg(dev, PP_CS8920_ISADMA, dma);
dma               905 drivers/net/ethernet/cirrus/cs89x0.c 		if (request_dma(dev->dma, dev->name)) {
dma               907 drivers/net/ethernet/cirrus/cs89x0.c 			       dev->name, dev->dma);
dma               910 drivers/net/ethernet/cirrus/cs89x0.c 		write_dma(dev, lp->chip_type, dev->dma);
dma               914 drivers/net/ethernet/cirrus/cs89x0.c 		disable_dma(dev->dma);
dma               915 drivers/net/ethernet/cirrus/cs89x0.c 		clear_dma_ff(dev->dma);
dma               916 drivers/net/ethernet/cirrus/cs89x0.c 		set_dma_mode(dev->dma, DMA_RX_MODE); /* auto_init as well */
dma               917 drivers/net/ethernet/cirrus/cs89x0.c 		set_dma_addr(dev->dma, isa_virt_to_bus(lp->dma_buff));
dma               918 drivers/net/ethernet/cirrus/cs89x0.c 		set_dma_count(dev->dma, lp->dmasize * 1024);
dma               919 drivers/net/ethernet/cirrus/cs89x0.c 		enable_dma(dev->dma);
dma               961 drivers/net/ethernet/cirrus/cs89x0.c 		free_dma(dev->dma);
dma              1103 drivers/net/ethernet/cirrus/cs89x0.c 	if (lp->use_dma && lp->dma) {
dma              1104 drivers/net/ethernet/cirrus/cs89x0.c 		free_dma(dev->dma);
dma              1327 drivers/net/ethernet/cirrus/cs89x0.c 			lp->dma = g_cs89x0_dma;
dma              1556 drivers/net/ethernet/cirrus/cs89x0.c 		pr_cont(", DMA %d", dev->dma);
dma              1703 drivers/net/ethernet/cirrus/cs89x0.c static int dma;
dma              1711 drivers/net/ethernet/cirrus/cs89x0.c module_param_hw(dma , int, dma, 0);
dma              1725 drivers/net/ethernet/cirrus/cs89x0.c MODULE_PARM_DESC(dma , "cs89x0 ISA DMA channel; ignored if use_dma=0");
dma              1729 drivers/net/ethernet/cirrus/cs89x0.c MODULE_PARM_DESC(dma , "(ignored)");
dma              1781 drivers/net/ethernet/cirrus/cs89x0.c 		lp->dma = dma;
dma               122 drivers/net/ethernet/emulex/benet/be.h 	dma_addr_t dma;
dma               678 drivers/net/ethernet/emulex/benet/be_cmds.c 	val |= (upper_32_bits(mbox_mem->dma) >> 2) << 2;
dma               688 drivers/net/ethernet/emulex/benet/be_cmds.c 	val |= (u32)(mbox_mem->dma >> 4) << 2;
dma               809 drivers/net/ethernet/emulex/benet/be_cmds.c 		sge->pa_hi = cpu_to_le32(upper_32_bits(mem->dma));
dma               810 drivers/net/ethernet/emulex/benet/be_cmds.c 		sge->pa_lo = cpu_to_le32(mem->dma & 0xFFFFFFFF);
dma               821 drivers/net/ethernet/emulex/benet/be_cmds.c 	u64 dma = (u64)mem->dma;
dma               824 drivers/net/ethernet/emulex/benet/be_cmds.c 		pages[i].lo = cpu_to_le32(dma & 0xFFFFFFFF);
dma               825 drivers/net/ethernet/emulex/benet/be_cmds.c 		pages[i].hi = cpu_to_le32(upper_32_bits(dma));
dma               826 drivers/net/ethernet/emulex/benet/be_cmds.c 		dma += PAGE_SIZE_4K;
dma              1807 drivers/net/ethernet/emulex/benet/be_cmds.c 					    &get_fat_cmd.dma, GFP_ATOMIC);
dma              1850 drivers/net/ethernet/emulex/benet/be_cmds.c 			  get_fat_cmd.va, get_fat_cmd.dma);
dma              2299 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              2329 drivers/net/ethernet/emulex/benet/be_cmds.c 	dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, cmd.dma);
dma              2377 drivers/net/ethernet/emulex/benet/be_cmds.c 	req->addr_low = cpu_to_le32((cmd->dma +
dma              2380 drivers/net/ethernet/emulex/benet/be_cmds.c 	req->addr_high = cpu_to_le32(upper_32_bits(cmd->dma +
dma              2511 drivers/net/ethernet/emulex/benet/be_cmds.c 	req->addr_low = cpu_to_le32((cmd->dma & 0xFFFFFFFF));
dma              2512 drivers/net/ethernet/emulex/benet/be_cmds.c 	req->addr_high = cpu_to_le32(upper_32_bits(cmd->dma));
dma              3063 drivers/net/ethernet/emulex/benet/be_cmds.c 	flash_cmd.va = dma_alloc_coherent(dev, flash_cmd.size, &flash_cmd.dma,
dma              3101 drivers/net/ethernet/emulex/benet/be_cmds.c 	dma_free_coherent(dev, flash_cmd.size, flash_cmd.va, flash_cmd.dma);
dma              3181 drivers/net/ethernet/emulex/benet/be_cmds.c 	flash_cmd.va = dma_alloc_coherent(dev, flash_cmd.size, &flash_cmd.dma,
dma              3203 drivers/net/ethernet/emulex/benet/be_cmds.c 	dma_free_coherent(dev, flash_cmd.size, flash_cmd.va, flash_cmd.dma);
dma              3432 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              3467 drivers/net/ethernet/emulex/benet/be_cmds.c 	dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, cmd.dma);
dma              3521 drivers/net/ethernet/emulex/benet/be_cmds.c 					    &attribs_cmd.dma, GFP_ATOMIC);
dma              3558 drivers/net/ethernet/emulex/benet/be_cmds.c 				  attribs_cmd.va, attribs_cmd.dma);
dma              3698 drivers/net/ethernet/emulex/benet/be_cmds.c 						 &get_mac_list_cmd.dma,
dma              3773 drivers/net/ethernet/emulex/benet/be_cmds.c 			  get_mac_list_cmd.va, get_mac_list_cmd.dma);
dma              3826 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              3852 drivers/net/ethernet/emulex/benet/be_cmds.c 	dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, cmd.dma);
dma              4032 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              4072 drivers/net/ethernet/emulex/benet/be_cmds.c 				  cmd.dma);
dma              4087 drivers/net/ethernet/emulex/benet/be_cmds.c 					   extfat_cmd.size, &extfat_cmd.dma,
dma              4111 drivers/net/ethernet/emulex/benet/be_cmds.c 			  extfat_cmd.dma);
dma              4125 drivers/net/ethernet/emulex/benet/be_cmds.c 					   extfat_cmd.size, &extfat_cmd.dma,
dma              4145 drivers/net/ethernet/emulex/benet/be_cmds.c 			  extfat_cmd.dma);
dma              4351 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              4402 drivers/net/ethernet/emulex/benet/be_cmds.c 				  cmd.dma);
dma              4449 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              4521 drivers/net/ethernet/emulex/benet/be_cmds.c 				  cmd.dma);
dma              4536 drivers/net/ethernet/emulex/benet/be_cmds.c 	cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma,
dma              4554 drivers/net/ethernet/emulex/benet/be_cmds.c 				  cmd.dma);
dma               274 drivers/net/ethernet/emulex/benet/be_ethtool.c 					 &read_cmd.dma, GFP_ATOMIC);
dma               299 drivers/net/ethernet/emulex/benet/be_ethtool.c 			  read_cmd.dma);
dma               815 drivers/net/ethernet/emulex/benet/be_ethtool.c 	cmd.va = dma_alloc_coherent(dev, cmd.size, &cmd.dma, GFP_KERNEL);
dma               838 drivers/net/ethernet/emulex/benet/be_ethtool.c 	dma_free_coherent(dev, cmd.size, cmd.va, cmd.dma);
dma               852 drivers/net/ethernet/emulex/benet/be_ethtool.c 					   ddrdma_cmd.size, &ddrdma_cmd.dma,
dma               866 drivers/net/ethernet/emulex/benet/be_ethtool.c 			  ddrdma_cmd.dma);
dma              1031 drivers/net/ethernet/emulex/benet/be_ethtool.c 					   eeprom_cmd.size, &eeprom_cmd.dma,
dma              1044 drivers/net/ethernet/emulex/benet/be_ethtool.c 			  eeprom_cmd.dma);
dma               152 drivers/net/ethernet/emulex/benet/be_main.c 				  mem->dma);
dma               167 drivers/net/ethernet/emulex/benet/be_main.c 				     &mem->dma, GFP_KERNEL);
dma               904 drivers/net/ethernet/emulex/benet/be_main.c 	dma_addr_t dma;
dma               908 drivers/net/ethernet/emulex/benet/be_main.c 	dma = (u64)le32_to_cpu(wrb->frag_pa_hi) << 32 |
dma               912 drivers/net/ethernet/emulex/benet/be_main.c 			dma_unmap_single(dev, dma, frag_len, DMA_TO_DEVICE);
dma               914 drivers/net/ethernet/emulex/benet/be_main.c 			dma_unmap_page(dev, dma, frag_len, DMA_TO_DEVICE);
dma              5747 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(dev, mem->size, mem->va, mem->dma);
dma              5751 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(dev, mem->size, mem->va, mem->dma);
dma              5755 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(dev, mem->size, mem->va, mem->dma);
dma              5770 drivers/net/ethernet/emulex/benet/be_main.c 						&mbox_mem_alloc->dma,
dma              5777 drivers/net/ethernet/emulex/benet/be_main.c 	mbox_mem_align->dma = PTR_ALIGN(mbox_mem_alloc->dma, 16);
dma              5781 drivers/net/ethernet/emulex/benet/be_main.c 					   &rx_filter->dma, GFP_KERNEL);
dma              5796 drivers/net/ethernet/emulex/benet/be_main.c 					   &stats_cmd->dma, GFP_KERNEL);
dma              5827 drivers/net/ethernet/emulex/benet/be_main.c 	dma_free_coherent(dev, rx_filter->size, rx_filter->va, rx_filter->dma);
dma              5830 drivers/net/ethernet/emulex/benet/be_main.c 			  mbox_mem_alloc->dma);
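The benet be_cmds.c entries above repeatedly allocate command buffers with dma_alloc_coherent() and hand the bus address to firmware as two little-endian 32-bit halves. A minimal sketch of that pattern, assuming an illustrative struct my_sge layout and helper name:

/* Hedged sketch: coherent command buffer plus hi/lo address split. */
#include <linux/dma-mapping.h>
#include <linux/kernel.h>

struct my_sge {
	__le32 pa_hi;
	__le32 pa_lo;
	__le32 len;
};

static void *my_cmd_buf_alloc(struct device *dev, size_t size,
			      dma_addr_t *dma, struct my_sge *sge)
{
	void *va = dma_alloc_coherent(dev, size, dma, GFP_KERNEL);

	if (!va)
		return NULL;

	sge->pa_hi = cpu_to_le32(upper_32_bits(*dma));
	sge->pa_lo = cpu_to_le32(lower_32_bits(*dma));
	sge->len   = cpu_to_le32(size);
	return va;	/* release later with dma_free_coherent(dev, size, va, *dma) */
}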
dma                89 drivers/net/ethernet/freescale/enetc/enetc.c 		dma_unmap_page(tx_ring->dev, tx_swbd->dma,
dma                92 drivers/net/ethernet/freescale/enetc/enetc.c 		dma_unmap_single(tx_ring->dev, tx_swbd->dma,
dma                94 drivers/net/ethernet/freescale/enetc/enetc.c 	tx_swbd->dma = 0;
dma               100 drivers/net/ethernet/freescale/enetc/enetc.c 	if (tx_swbd->dma)
dma               120 drivers/net/ethernet/freescale/enetc/enetc.c 	dma_addr_t dma;
dma               127 drivers/net/ethernet/freescale/enetc/enetc.c 	dma = dma_map_single(tx_ring->dev, skb->data, len, DMA_TO_DEVICE);
dma               128 drivers/net/ethernet/freescale/enetc/enetc.c 	if (unlikely(dma_mapping_error(tx_ring->dev, dma)))
dma               131 drivers/net/ethernet/freescale/enetc/enetc.c 	temp_bd.addr = cpu_to_le64(dma);
dma               136 drivers/net/ethernet/freescale/enetc/enetc.c 	tx_swbd->dma = dma;
dma               192 drivers/net/ethernet/freescale/enetc/enetc.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, len,
dma               194 drivers/net/ethernet/freescale/enetc/enetc.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma               211 drivers/net/ethernet/freescale/enetc/enetc.c 		temp_bd.addr = cpu_to_le64(dma);
dma               214 drivers/net/ethernet/freescale/enetc/enetc.c 		tx_swbd->dma = dma;
dma               363 drivers/net/ethernet/freescale/enetc/enetc.c 		if (likely(tx_swbd->dma))
dma               427 drivers/net/ethernet/freescale/enetc/enetc.c 	rx_swbd->dma = addr;
dma               454 drivers/net/ethernet/freescale/enetc/enetc.c 		rxbd->w.addr = cpu_to_le64(rx_swbd->dma +
dma               561 drivers/net/ethernet/freescale/enetc/enetc.c 	dma_sync_single_range_for_cpu(rx_ring->dev, rx_swbd->dma,
dma               577 drivers/net/ethernet/freescale/enetc/enetc.c 		dma_sync_single_range_for_device(rx_ring->dev, rx_swbd->dma,
dma               582 drivers/net/ethernet/freescale/enetc/enetc.c 		dma_unmap_page(rx_ring->dev, rx_swbd->dma,
dma               923 drivers/net/ethernet/freescale/enetc/enetc.c 		dma_unmap_page(rx_ring->dev, rx_swbd->dma,
dma                22 drivers/net/ethernet/freescale/enetc/enetc.h 	dma_addr_t dma;
dma                36 drivers/net/ethernet/freescale/enetc/enetc.h 	dma_addr_t dma;
dma               110 drivers/net/ethernet/freescale/enetc/enetc.h 	dma_addr_t dma;
dma               118 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 	dma_addr_t dma, dma_align;
dma               130 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 				 &dma, GFP_KERNEL);
dma               136 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 	dma_align = ALIGN(dma, RFSE_ALIGN);
dma               148 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 			  tmp, dma);
dma               158 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 	dma_addr_t dma, dma_align;
dma               167 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 				 &dma, GFP_KERNEL);
dma               172 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 	dma_align = ALIGN(dma, RSSE_ALIGN);
dma               195 drivers/net/ethernet/freescale/enetc/enetc_cbdr.c 	dma_free_coherent(&si->pdev->dev, count + RSSE_ALIGN, tmp, dma);
dma                76 drivers/net/ethernet/freescale/enetc/enetc_msg.c 	msg->vaddr = dma_alloc_coherent(dev, msg->size, &msg->dma,
dma                85 drivers/net/ethernet/freescale/enetc/enetc_msg.c 	val = lower_32_bits(msg->dma);
dma                87 drivers/net/ethernet/freescale/enetc/enetc_msg.c 	val = upper_32_bits(msg->dma);
dma               100 drivers/net/ethernet/freescale/enetc/enetc_msg.c 	dma_free_coherent(&si->pdev->dev, msg->size, msg->vaddr, msg->dma);
dma                22 drivers/net/ethernet/freescale/enetc/enetc_vf.c 	val = enetc_vsi_set_msize(msg->size) | lower_32_bits(msg->dma);
dma                23 drivers/net/ethernet/freescale/enetc/enetc_vf.c 	enetc_wr(hw, ENETC_VSIMSGSNDAR1, upper_32_bits(msg->dma));
dma                63 drivers/net/ethernet/freescale/enetc/enetc_vf.c 	msg.vaddr = dma_alloc_coherent(priv->dev, msg.size, &msg.dma,
dma                79 drivers/net/ethernet/freescale/enetc/enetc_vf.c 	dma_free_coherent(priv->dev, msg.size, msg.vaddr, msg.dma);
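The enetc entries above show the usual TX mapping split: the linear part of the skb goes through dma_map_single(), each page fragment through skb_frag_dma_map(), with dma_mapping_error() checked before the bus address is written into a descriptor. A sketch of that flow, assuming an illustrative struct my_tx_desc rather than the enetc descriptor layout:

/* Hedged sketch: map an skb's head and fragments for transmit. */
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

struct my_tx_desc {
	__le64 addr;
	__le16 len;
};

static int my_map_skb(struct device *dev, struct sk_buff *skb,
		      struct my_tx_desc *desc)
{
	unsigned int len = skb_headlen(skb);
	dma_addr_t dma;
	int f;

	dma = dma_map_single(dev, skb->data, len, DMA_TO_DEVICE);
	if (unlikely(dma_mapping_error(dev, dma)))
		return -ENOMEM;
	desc[0].addr = cpu_to_le64(dma);
	desc[0].len  = cpu_to_le16(len);

	for (f = 0; f < skb_shinfo(skb)->nr_frags; f++) {
		const skb_frag_t *frag = &skb_shinfo(skb)->frags[f];

		len = skb_frag_size(frag);
		dma = skb_frag_dma_map(dev, frag, 0, len, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma))
			return -ENOMEM;	/* real drivers also unroll earlier mappings */
		desc[f + 1].addr = cpu_to_le64(dma);
		desc[f + 1].len  = cpu_to_le16(len);
	}
	return 0;
}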
dma               467 drivers/net/ethernet/freescale/fec.h 	dma_addr_t	dma;
dma               906 drivers/net/ethernet/freescale/fec_main.c 		writel(rxq->bd.dma, fep->hwp + FEC_R_DES_START(i));
dma               917 drivers/net/ethernet/freescale/fec_main.c 		writel(txq->bd.dma, fep->hwp + FEC_X_DES_START(i));
dma              3280 drivers/net/ethernet/freescale/fec_main.c 		rxq->bd.dma = bd_dma;
dma              3296 drivers/net/ethernet/freescale/fec_main.c 		txq->bd.dma = bd_dma;
dma               257 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 					dma_addr_t dma;
dma               266 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 					dma = dma_map_single(fep->dev,
dma               270 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 					CBDW_BUFADDR(bdp, dma);
dma              1134 drivers/net/ethernet/freescale/gianfar.c 		dma_unmap_page(rx_queue->dev, rxb->dma,
dma              1251 drivers/net/ethernet/freescale/gianfar.c 	rxb->dma = addr;
dma              1289 drivers/net/ethernet/freescale/gianfar.c 				rxb->dma + rxb->page_offset + RXBUF_ALIGNMENT);
dma              2430 drivers/net/ethernet/freescale/gianfar.c 	dma_sync_single_range_for_device(rxq->dev, old_rxb->dma,
dma              2454 drivers/net/ethernet/freescale/gianfar.c 	dma_sync_single_range_for_cpu(rx_queue->dev, rxb->dma, rxb->page_offset,
dma              2462 drivers/net/ethernet/freescale/gianfar.c 		dma_unmap_page(rx_queue->dev, rxb->dma,
dma               989 drivers/net/ethernet/freescale/gianfar.h 	dma_addr_t dma;
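The gianfar entries above sync a received page region for the CPU, then either give the buffer back to the device or unmap the page when it leaves the ring. A sketch of that receive-side handling; struct my_rx_buff mirrors the fields visible in the listing (dma, page, page_offset) but is not the gianfar structure:

/* Hedged sketch: sync-for-CPU on receive, recycle or unmap afterwards. */
#include <linux/dma-mapping.h>
#include <linux/mm.h>

struct my_rx_buff {
	dma_addr_t dma;
	struct page *page;
	unsigned int page_offset;
};

static void my_rx_complete(struct device *dev, struct my_rx_buff *rxb,
			   unsigned int size, bool reuse)
{
	/* make the DMA'd bytes visible to the CPU before parsing them */
	dma_sync_single_range_for_cpu(dev, rxb->dma, rxb->page_offset,
				      size, DMA_FROM_DEVICE);

	if (reuse) {
		/* hand the buffer back to the device for the next frame */
		dma_sync_single_range_for_device(dev, rxb->dma,
						 rxb->page_offset, size,
						 DMA_FROM_DEVICE);
	} else {
		/* page leaves the ring: drop the mapping entirely */
		dma_unmap_page(dev, rxb->dma, PAGE_SIZE, DMA_FROM_DEVICE);
	}
}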
dma               429 drivers/net/ethernet/google/gve/gve.h int gve_alloc_page(struct device *dev, struct page **page, dma_addr_t *dma,
dma               431 drivers/net/ethernet/google/gve/gve.h void gve_free_page(struct device *dev, struct page *page, dma_addr_t dma,
dma               517 drivers/net/ethernet/google/gve/gve_main.c int gve_alloc_page(struct device *dev, struct page **page, dma_addr_t *dma,
dma               523 drivers/net/ethernet/google/gve/gve_main.c 	*dma = dma_map_page(dev, *page, 0, PAGE_SIZE, dir);
dma               524 drivers/net/ethernet/google/gve/gve_main.c 	if (dma_mapping_error(dev, *dma)) {
dma               572 drivers/net/ethernet/google/gve/gve_main.c void gve_free_page(struct device *dev, struct page *page, dma_addr_t dma,
dma               575 drivers/net/ethernet/google/gve/gve_main.c 	if (!dma_mapping_error(dev, dma))
dma               576 drivers/net/ethernet/google/gve/gve_main.c 		dma_unmap_page(dev, dma, PAGE_SIZE, dir);
dma               398 drivers/net/ethernet/google/gve/gve_tx.c 	dma_addr_t dma;
dma               402 drivers/net/ethernet/google/gve/gve_tx.c 		dma = page_buses[page];
dma               403 drivers/net/ethernet/google/gve/gve_tx.c 		dma_sync_single_for_device(dev, dma, PAGE_SIZE, DMA_TO_DEVICE);
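The gve entries above pair an allocate-and-map helper with a free helper that only unmaps when the stored handle is valid. A sketch of that pairing; my_alloc_page()/my_free_page() are illustrative names and omit arguments the real gve helpers take:

/* Hedged sketch: allocate a page, map it, and undo both on the free path. */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>

static int my_alloc_page(struct device *dev, struct page **page,
			 dma_addr_t *dma, enum dma_data_direction dir)
{
	*page = alloc_page(GFP_KERNEL);
	if (!*page)
		return -ENOMEM;

	*dma = dma_map_page(dev, *page, 0, PAGE_SIZE, dir);
	if (dma_mapping_error(dev, *dma)) {
		put_page(*page);
		return -ENOMEM;
	}
	return 0;
}

static void my_free_page(struct device *dev, struct page *page,
			 dma_addr_t dma, enum dma_data_direction dir)
{
	if (!dma_mapping_error(dev, dma))
		dma_unmap_page(dev, dma, PAGE_SIZE, dir);
	if (page)
		put_page(page);
}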
dma                69 drivers/net/ethernet/hisilicon/hns/hnae.c 	cb->dma = dma_map_page(ring_to_dev(ring), cb->priv, 0,
dma                72 drivers/net/ethernet/hisilicon/hns/hnae.c 	if (dma_mapping_error(ring_to_dev(ring), cb->dma))
dma                81 drivers/net/ethernet/hisilicon/hns/hnae.c 		dma_unmap_single(ring_to_dev(ring), cb->dma, cb->length,
dma                84 drivers/net/ethernet/hisilicon/hns/hnae.c 		dma_unmap_page(ring_to_dev(ring), cb->dma, cb->length,
dma               214 drivers/net/ethernet/hisilicon/hns/hnae.h 	dma_addr_t dma; /* dma address of this desc */
dma               621 drivers/net/ethernet/hisilicon/hns/hnae.h 	ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma);
dma               637 drivers/net/ethernet/hisilicon/hns/hnae.h 	if (!ring->desc_cb[i].dma)
dma               652 drivers/net/ethernet/hisilicon/hns/hnae.h 	ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma);
dma               659 drivers/net/ethernet/hisilicon/hns/hnae.h 	ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma
dma               673 drivers/net/ethernet/hisilicon/hns/hnae.h 			ring->desc[j].addr = cpu_to_le64(ring->desc_cb[j].dma);
dma               690 drivers/net/ethernet/hisilicon/hns/hnae.h 			    cpu_to_le64(ring->desc_cb[j].dma))
dma               692 drivers/net/ethernet/hisilicon/hns/hnae.h 					cpu_to_le64(ring->desc_cb[j].dma);
dma               251 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c 	dma_addr_t dma = ring->desc_dma_addr;
dma               255 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c 			       (u32)dma);
dma               257 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c 			       (u32)((dma >> 31) >> 1));
dma               267 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c 			       (u32)dma);
dma               269 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c 			       (u32)((dma >> 31) >> 1));
dma                37 drivers/net/ethernet/hisilicon/hns/hns_enet.c 			    int send_sz, dma_addr_t dma, int frag_end,
dma                56 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	desc_cb->dma = dma;
dma                59 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	desc->addr = cpu_to_le64(dma);
dma               130 drivers/net/ethernet/hisilicon/hns/hns_enet.c 			 int size, dma_addr_t dma, int frag_end,
dma               133 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	fill_v2_desc_hw(ring, priv, size, size, dma, frag_end,
dma               145 drivers/net/ethernet/hisilicon/hns/hns_enet.c 		      int size, dma_addr_t dma, int frag_end,
dma               158 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	desc_cb->dma = dma;
dma               161 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	desc->addr = cpu_to_le64(dma);
dma               280 drivers/net/ethernet/hisilicon/hns/hns_enet.c 			  int size, dma_addr_t dma, int frag_end,
dma               296 drivers/net/ethernet/hisilicon/hns/hns_enet.c 				dma + BD_MAX_SEND_SIZE * k,
dma               315 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	dma_addr_t dma;
dma               337 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	dma = dma_map_single(dev, skb->data, size, DMA_TO_DEVICE);
dma               338 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	if (dma_mapping_error(dev, dma)) {
dma               343 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	priv->ops.fill_desc(ring, skb, size, dma, seg_num == 1 ? 1 : 0,
dma               350 drivers/net/ethernet/hisilicon/hns/hns_enet.c 		dma = skb_frag_dma_map(dev, frag, 0, size, DMA_TO_DEVICE);
dma               351 drivers/net/ethernet/hisilicon/hns/hns_enet.c 		if (dma_mapping_error(dev, dma)) {
dma               356 drivers/net/ethernet/hisilicon/hns/hns_enet.c 		priv->ops.fill_desc(ring, skb_frag_page(frag), size, dma,
dma               381 drivers/net/ethernet/hisilicon/hns/hns_enet.c 				       ring->desc_cb[ring->next_to_use].dma,
dma               386 drivers/net/ethernet/hisilicon/hns/hns_enet.c 					 ring->desc_cb[next_to_use].dma,
dma                46 drivers/net/ethernet/hisilicon/hns/hns_enet.h 			  int size, dma_addr_t dma, int frag_end,
dma              1116 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	dma_addr_t dma;
dma              1126 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		dma = dma_map_single(dev, skb->data, size, DMA_TO_DEVICE);
dma              1129 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		dma = skb_frag_dma_map(dev, frag, 0, size, DMA_TO_DEVICE);
dma              1132 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	if (unlikely(dma_mapping_error(dev, dma))) {
dma              1145 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		desc_cb->dma = dma;
dma              1147 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		desc->addr = cpu_to_le64(dma);
dma              1167 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		desc_cb->dma = dma + HNS3_MAX_BD_SIZE * k;
dma              1172 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		desc->addr = cpu_to_le64(dma + HNS3_MAX_BD_SIZE * k);
dma              1305 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 					 ring->desc_cb[ring->next_to_use].dma,
dma              1310 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 				       ring->desc_cb[ring->next_to_use].dma,
dma              1315 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		ring->desc_cb[ring->next_to_use].dma = 0;
dma              2152 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	cb->dma = dma_map_page(ring_to_dev(ring), cb->priv, 0,
dma              2155 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	if (unlikely(dma_mapping_error(ring_to_dev(ring), cb->dma)))
dma              2165 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		dma_unmap_single(ring_to_dev(ring), cb->dma, cb->length,
dma              2168 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		dma_unmap_page(ring_to_dev(ring), cb->dma, cb->length,
dma              2182 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	if (!ring->desc_cb[i].dma)
dma              2251 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma);
dma              2281 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma);
dma              2288 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	ring->desc[i].addr = cpu_to_le64(ring->desc_cb[i].dma +
dma              3669 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	dma_addr_t dma = ring->desc_dma_addr;
dma              3673 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 		hns3_write_dev(q, HNS3_RING_RX_RING_BASEADDR_L_REG, (u32)dma);
dma              3675 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 			       (u32)((dma >> 31) >> 1));
dma              3684 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 			       (u32)dma);
dma              3686 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 			       (u32)((dma >> 31) >> 1));
dma              4144 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 			ring->desc_cb[ring->next_to_use].dma = 0;
dma               302 drivers/net/ethernet/hisilicon/hns3/hns3_enet.h 	dma_addr_t dma; /* dma address of this desc */
dma               103 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_cmd.c 	dma_addr_t dma = ring->desc_dma_addr;
dma               110 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_cmd.c 				lower_32_bits(dma));
dma               112 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_cmd.c 				upper_32_bits(dma));
dma               121 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_cmd.c 				lower_32_bits(dma));
dma               123 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_cmd.c 				upper_32_bits(dma));
dma                26 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_cmd.h 	dma_addr_t dma;
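The hns/hns3 entries above program ring base registers as (u32)dma for the low half and (u32)((dma >> 31) >> 1) for the high half; the double shift is equivalent to dma >> 32 but stays well defined when dma_addr_t is only 32 bits wide. A sketch of that idiom, with my_write_reg() and the register offsets assumed for illustration:

/* Hedged sketch: split a ring base address into two 32-bit registers. */
#include <linux/io.h>
#include <linux/types.h>

#define MY_RING_BASEADDR_L	0x00	/* assumed register offsets */
#define MY_RING_BASEADDR_H	0x04

static void my_write_reg(void __iomem *base, u32 off, u32 val)
{
	writel(val, base + off);
}

static void my_set_ring_base(void __iomem *base, dma_addr_t dma)
{
	my_write_reg(base, MY_RING_BASEADDR_L, (u32)dma);
	/* high 32 bits, written without shifting a possibly 32-bit type by 32 */
	my_write_reg(base, MY_RING_BASEADDR_H, (u32)((dma >> 31) >> 1));
}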
dma               683 drivers/net/ethernet/hp/hp100.c 		dev->dma = 4;
dma               204 drivers/net/ethernet/i825xx/lasi_82596.c 	dma_free_attrs(&pdev->dev, sizeof(struct i596_private), lp->dma,
dma               315 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma;
dma               369 drivers/net/ethernet/i825xx/lib82596.c static inline int wait_istat(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str)
dma               371 drivers/net/ethernet/i825xx/lib82596.c 	DMA_INV(dev, &(dma->iscp), sizeof(struct i596_iscp));
dma               372 drivers/net/ethernet/i825xx/lib82596.c 	while (--delcnt && dma->iscp.stat) {
dma               374 drivers/net/ethernet/i825xx/lib82596.c 		DMA_INV(dev, &(dma->iscp), sizeof(struct i596_iscp));
dma               378 drivers/net/ethernet/i825xx/lib82596.c 		     dev->name, str, SWAP16(dma->iscp.stat));
dma               385 drivers/net/ethernet/i825xx/lib82596.c static inline int wait_cmd(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str)
dma               387 drivers/net/ethernet/i825xx/lib82596.c 	DMA_INV(dev, &(dma->scb), sizeof(struct i596_scb));
dma               388 drivers/net/ethernet/i825xx/lib82596.c 	while (--delcnt && dma->scb.command) {
dma               390 drivers/net/ethernet/i825xx/lib82596.c 		DMA_INV(dev, &(dma->scb), sizeof(struct i596_scb));
dma               395 drivers/net/ethernet/i825xx/lib82596.c 		       SWAP16(dma->scb.status),
dma               396 drivers/net/ethernet/i825xx/lib82596.c 		       SWAP16(dma->scb.command));
dma               406 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma = lp->dma;
dma               412 drivers/net/ethernet/i825xx/lib82596.c 	       &dma->scp, dma->scp.sysbus, SWAP32(dma->scp.iscp));
dma               414 drivers/net/ethernet/i825xx/lib82596.c 	       &dma->iscp, SWAP32(dma->iscp.stat), SWAP32(dma->iscp.scb));
dma               417 drivers/net/ethernet/i825xx/lib82596.c 	       &dma->scb, SWAP16(dma->scb.status), SWAP16(dma->scb.command),
dma               418 drivers/net/ethernet/i825xx/lib82596.c 		SWAP16(dma->scb.cmd), SWAP32(dma->scb.rfd));
dma               421 drivers/net/ethernet/i825xx/lib82596.c 	       SWAP32(dma->scb.crc_err), SWAP32(dma->scb.align_err),
dma               422 drivers/net/ethernet/i825xx/lib82596.c 	       SWAP32(dma->scb.resource_err), SWAP32(dma->scb.over_err),
dma               423 drivers/net/ethernet/i825xx/lib82596.c 	       SWAP32(dma->scb.rcvdt_err), SWAP32(dma->scb.short_err));
dma               454 drivers/net/ethernet/i825xx/lib82596.c 	DMA_INV(dev, dma, sizeof(struct i596_dma));
dma               458 drivers/net/ethernet/i825xx/lib82596.c #define virt_to_dma(lp, v) ((lp)->dma_addr + (dma_addr_t)((unsigned long)(v)-(unsigned long)((lp)->dma)))
dma               463 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma = lp->dma;
dma               470 drivers/net/ethernet/i825xx/lib82596.c 	for (i = 0, rbd = dma->rbds; i < rx_ring_size; i++, rbd++) {
dma               487 drivers/net/ethernet/i825xx/lib82596.c 	lp->rbd_head = dma->rbds;
dma               488 drivers/net/ethernet/i825xx/lib82596.c 	rbd = dma->rbds + rx_ring_size - 1;
dma               489 drivers/net/ethernet/i825xx/lib82596.c 	rbd->v_next = dma->rbds;
dma               490 drivers/net/ethernet/i825xx/lib82596.c 	rbd->b_next = SWAP32(virt_to_dma(lp, dma->rbds));
dma               494 drivers/net/ethernet/i825xx/lib82596.c 	for (i = 0, rfd = dma->rfds; i < rx_ring_size; i++, rfd++) {
dma               501 drivers/net/ethernet/i825xx/lib82596.c 	lp->rfd_head = dma->rfds;
dma               502 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.rfd = SWAP32(virt_to_dma(lp, dma->rfds));
dma               503 drivers/net/ethernet/i825xx/lib82596.c 	rfd = dma->rfds;
dma               505 drivers/net/ethernet/i825xx/lib82596.c 	rfd->v_prev = dma->rfds + rx_ring_size - 1;
dma               506 drivers/net/ethernet/i825xx/lib82596.c 	rfd = dma->rfds + rx_ring_size - 1;
dma               507 drivers/net/ethernet/i825xx/lib82596.c 	rfd->v_next = dma->rfds;
dma               508 drivers/net/ethernet/i825xx/lib82596.c 	rfd->b_next = SWAP32(virt_to_dma(lp, dma->rfds));
dma               511 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK_INV(dev, dma, sizeof(struct i596_dma));
dma               521 drivers/net/ethernet/i825xx/lib82596.c 	for (i = 0, rbd = lp->dma->rbds; i < rx_ring_size; i++, rbd++) {
dma               535 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma = lp->dma;
dma               541 drivers/net/ethernet/i825xx/lib82596.c 		dma->rfds[i].rbd = I596_NULL;
dma               542 drivers/net/ethernet/i825xx/lib82596.c 		dma->rfds[i].cmd = SWAP16(CMD_FLEX);
dma               544 drivers/net/ethernet/i825xx/lib82596.c 	dma->rfds[rx_ring_size-1].cmd = SWAP16(CMD_EOL|CMD_FLEX);
dma               545 drivers/net/ethernet/i825xx/lib82596.c 	lp->rfd_head = dma->rfds;
dma               546 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.rfd = SWAP32(virt_to_dma(lp, dma->rfds));
dma               547 drivers/net/ethernet/i825xx/lib82596.c 	lp->rbd_head = dma->rbds;
dma               548 drivers/net/ethernet/i825xx/lib82596.c 	dma->rfds[0].rbd = SWAP32(virt_to_dma(lp, dma->rbds));
dma               550 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK_INV(dev, dma, sizeof(struct i596_dma));
dma               557 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma = lp->dma;
dma               567 drivers/net/ethernet/i825xx/lib82596.c 	dma->scp.sysbus = SYSBUS;
dma               568 drivers/net/ethernet/i825xx/lib82596.c 	dma->scp.iscp = SWAP32(virt_to_dma(lp, &(dma->iscp)));
dma               569 drivers/net/ethernet/i825xx/lib82596.c 	dma->iscp.scb = SWAP32(virt_to_dma(lp, &(dma->scb)));
dma               570 drivers/net/ethernet/i825xx/lib82596.c 	dma->iscp.stat = SWAP32(ISCP_BUSY);
dma               574 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.cmd = I596_NULL;
dma               578 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->scp), sizeof(struct i596_scp));
dma               579 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->iscp), sizeof(struct i596_iscp));
dma               580 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb));
dma               582 drivers/net/ethernet/i825xx/lib82596.c 	mpu_port(dev, PORT_ALTSCP, virt_to_dma(lp, &dma->scp));
dma               584 drivers/net/ethernet/i825xx/lib82596.c 	if (wait_istat(dev, dma, 1000, "initialization timed out"))
dma               598 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.command = 0;
dma               599 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb));
dma               603 drivers/net/ethernet/i825xx/lib82596.c 	memcpy(dma->cf_cmd.i596_config, init_setup, 14);
dma               604 drivers/net/ethernet/i825xx/lib82596.c 	dma->cf_cmd.cmd.command = SWAP16(CmdConfigure);
dma               605 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->cf_cmd), sizeof(struct cf_cmd));
dma               606 drivers/net/ethernet/i825xx/lib82596.c 	i596_add_cmd(dev, &dma->cf_cmd.cmd);
dma               609 drivers/net/ethernet/i825xx/lib82596.c 	memcpy(dma->sa_cmd.eth_addr, dev->dev_addr, ETH_ALEN);
dma               610 drivers/net/ethernet/i825xx/lib82596.c 	dma->sa_cmd.cmd.command = SWAP16(CmdSASetup);
dma               611 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->sa_cmd), sizeof(struct sa_cmd));
dma               612 drivers/net/ethernet/i825xx/lib82596.c 	i596_add_cmd(dev, &dma->sa_cmd.cmd);
dma               615 drivers/net/ethernet/i825xx/lib82596.c 	dma->tdr_cmd.cmd.command = SWAP16(CmdTDR);
dma               616 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->tdr_cmd), sizeof(struct tdr_cmd));
dma               617 drivers/net/ethernet/i825xx/lib82596.c 	i596_add_cmd(dev, &dma->tdr_cmd.cmd);
dma               621 drivers/net/ethernet/i825xx/lib82596.c 	if (wait_cmd(dev, dma, 1000, "timed out waiting to issue RX_START")) {
dma               626 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.command = SWAP16(RX_START);
dma               627 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.rfd = SWAP32(virt_to_dma(lp, dma->rfds));
dma               628 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb));
dma               633 drivers/net/ethernet/i825xx/lib82596.c 	if (wait_cmd(dev, dma, 1000, "RX_START not processed"))
dma               780 drivers/net/ethernet/i825xx/lib82596.c 		lp->dma->scb.rfd = rfd->b_next;
dma               833 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, lp->dma, 100, "i596_cleanup_cmd timed out");
dma               834 drivers/net/ethernet/i825xx/lib82596.c 	lp->dma->scb.cmd = I596_NULL;
dma               835 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(lp->dma->scb), sizeof(struct i596_scb));
dma               847 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, lp->dma, 100, "i596_reset timed out");
dma               852 drivers/net/ethernet/i825xx/lib82596.c 	lp->dma->scb.command = SWAP16(CUC_ABORT | RX_ABORT);
dma               853 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &(lp->dma->scb), sizeof(struct i596_scb));
dma               857 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, lp->dma, 1000, "i596_reset 2 timed out");
dma               871 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma = lp->dma;
dma               891 drivers/net/ethernet/i825xx/lib82596.c 		wait_cmd(dev, dma, 100, "i596_add_cmd timed out");
dma               892 drivers/net/ethernet/i825xx/lib82596.c 		dma->scb.cmd = SWAP32(virt_to_dma(lp, &cmd->status));
dma               893 drivers/net/ethernet/i825xx/lib82596.c 		dma->scb.command = SWAP16(CUC_START);
dma               894 drivers/net/ethernet/i825xx/lib82596.c 		DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb));
dma               958 drivers/net/ethernet/i825xx/lib82596.c 		lp->dma->scb.command = SWAP16(CUC_START | RX_START);
dma               959 drivers/net/ethernet/i825xx/lib82596.c 		DMA_WBACK_INV(dev, &(lp->dma->scb), sizeof(struct i596_scb));
dma               988 drivers/net/ethernet/i825xx/lib82596.c 	tx_cmd = lp->dma->tx_cmds + lp->next_tx_cmd;
dma               989 drivers/net/ethernet/i825xx/lib82596.c 	tbd = lp->dma->tbds + lp->next_tx_cmd;
dma              1052 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma;
dma              1066 drivers/net/ethernet/i825xx/lib82596.c 	dma = dma_alloc_attrs(dev->dev.parent, sizeof(struct i596_dma),
dma              1069 drivers/net/ethernet/i825xx/lib82596.c 	if (!dma) {
dma              1077 drivers/net/ethernet/i825xx/lib82596.c 	memset(dma, 0, sizeof(struct i596_dma));
dma              1078 drivers/net/ethernet/i825xx/lib82596.c 	lp->dma = dma;
dma              1080 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.command = 0;
dma              1081 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.cmd = I596_NULL;
dma              1082 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.rfd = I596_NULL;
dma              1085 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK_INV(dev, dma, sizeof(struct i596_dma));
dma              1090 drivers/net/ethernet/i825xx/lib82596.c 			       dma, lp->dma_addr, LIB82596_DMA_ATTR);
dma              1099 drivers/net/ethernet/i825xx/lib82596.c 			     dev->name, dma, (int)sizeof(struct i596_dma),
dma              1100 drivers/net/ethernet/i825xx/lib82596.c 			     &dma->scb));
dma              1118 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma;
dma              1122 drivers/net/ethernet/i825xx/lib82596.c 	dma = lp->dma;
dma              1126 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, dma, 100, "i596 interrupt, timeout");
dma              1127 drivers/net/ethernet/i825xx/lib82596.c 	status = SWAP16(dma->scb.status);
dma              1259 drivers/net/ethernet/i825xx/lib82596.c 		dma->scb.cmd = SWAP32(virt_to_dma(lp, &lp->cmd_head->status));
dma              1260 drivers/net/ethernet/i825xx/lib82596.c 		DMA_WBACK_INV(dev, &dma->scb, sizeof(struct i596_scb));
dma              1283 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, dma, 100, "i596 interrupt, timeout");
dma              1284 drivers/net/ethernet/i825xx/lib82596.c 	dma->scb.command = SWAP16(ack_cmd);
dma              1285 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &dma->scb, sizeof(struct i596_scb));
dma              1293 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, dma, 100, "i596 interrupt, exit timeout");
dma              1310 drivers/net/ethernet/i825xx/lib82596.c 		   dev->name, SWAP16(lp->dma->scb.status)));
dma              1314 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, lp->dma, 100, "close1 timed out");
dma              1315 drivers/net/ethernet/i825xx/lib82596.c 	lp->dma->scb.command = SWAP16(CUC_ABORT | RX_ABORT);
dma              1316 drivers/net/ethernet/i825xx/lib82596.c 	DMA_WBACK(dev, &lp->dma->scb, sizeof(struct i596_scb));
dma              1320 drivers/net/ethernet/i825xx/lib82596.c 	wait_cmd(dev, lp->dma, 100, "close2 timed out");
dma              1338 drivers/net/ethernet/i825xx/lib82596.c 	struct i596_dma *dma = lp->dma;
dma              1349 drivers/net/ethernet/i825xx/lib82596.c 	    !(dma->cf_cmd.i596_config[8] & 0x01)) {
dma              1350 drivers/net/ethernet/i825xx/lib82596.c 		dma->cf_cmd.i596_config[8] |= 0x01;
dma              1354 drivers/net/ethernet/i825xx/lib82596.c 	    (dma->cf_cmd.i596_config[8] & 0x01)) {
dma              1355 drivers/net/ethernet/i825xx/lib82596.c 		dma->cf_cmd.i596_config[8] &= ~0x01;
dma              1359 drivers/net/ethernet/i825xx/lib82596.c 	    (dma->cf_cmd.i596_config[11] & 0x20)) {
dma              1360 drivers/net/ethernet/i825xx/lib82596.c 		dma->cf_cmd.i596_config[11] &= ~0x20;
dma              1364 drivers/net/ethernet/i825xx/lib82596.c 	    !(dma->cf_cmd.i596_config[11] & 0x20)) {
dma              1365 drivers/net/ethernet/i825xx/lib82596.c 		dma->cf_cmd.i596_config[11] |= 0x20;
dma              1369 drivers/net/ethernet/i825xx/lib82596.c 		if (dma->cf_cmd.cmd.command)
dma              1374 drivers/net/ethernet/i825xx/lib82596.c 			dma->cf_cmd.cmd.command = SWAP16(CmdConfigure);
dma              1375 drivers/net/ethernet/i825xx/lib82596.c 			DMA_WBACK_INV(dev, &dma->cf_cmd, sizeof(struct cf_cmd));
dma              1376 drivers/net/ethernet/i825xx/lib82596.c 			i596_add_cmd(dev, &dma->cf_cmd.cmd);
dma              1392 drivers/net/ethernet/i825xx/lib82596.c 		cmd = &dma->mc_cmd;
dma              1407 drivers/net/ethernet/i825xx/lib82596.c 		DMA_WBACK_INV(dev, &dma->mc_cmd, sizeof(struct mc_cmd));
dma               156 drivers/net/ethernet/i825xx/sni_82596.c 	dma_free_attrs(dev->dev.parent, sizeof(struct i596_private), lp->dma,
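The lib82596 entries above allocate one large control block with dma_alloc_attrs() and derive the bus address of each embedded structure from the base handle (the virt_to_dma() macro in the listing). A sketch of that scheme; struct my_ctrl, struct my_priv, and the helper names are illustrative stand-ins:

/* Hedged sketch: one mapping, many device-visible sub-addresses. */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

struct my_ctrl {
	u32 scb;
	u32 rfds[64];
};

struct my_priv {
	struct my_ctrl *ctrl;	/* CPU address of the mapped block */
	dma_addr_t ctrl_dma;	/* bus address of the same block */
};

static int my_ctrl_alloc(struct device *dev, struct my_priv *lp)
{
	lp->ctrl = dma_alloc_attrs(dev, sizeof(*lp->ctrl), &lp->ctrl_dma,
				   GFP_KERNEL, 0 /* attrs */);
	return lp->ctrl ? 0 : -ENOMEM;
}

/* bus address of any field inside the single allocation */
static dma_addr_t my_member_dma(struct my_priv *lp, void *v)
{
	return lp->ctrl_dma +
	       (dma_addr_t)((unsigned long)v - (unsigned long)lp->ctrl);
}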
dma               287 drivers/net/ethernet/ibm/ibmvnic.c 		pool->rx_buff[index].dma = dma_addr;
dma               912 drivers/net/ethernet/ibm/ibmvnic.h 	dma_addr_t dma;
dma               127 drivers/net/ethernet/intel/e1000/e1000.h 	dma_addr_t dma;
dma               141 drivers/net/ethernet/intel/e1000/e1000.h 	dma_addr_t dma;
dma               148 drivers/net/ethernet/intel/e1000/e1000.h 	dma_addr_t dma;
dma               169 drivers/net/ethernet/intel/e1000/e1000.h 	dma_addr_t dma;
dma               934 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 			if (txdr->buffer_info[i].dma)
dma               936 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 						 txdr->buffer_info[i].dma,
dma               945 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 			if (rxdr->buffer_info[i].dma)
dma               947 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 						 rxdr->buffer_info[i].dma,
dma               956 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 				  txdr->dma);
dma               961 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 				  rxdr->dma);
dma               994 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 	txdr->desc = dma_alloc_coherent(&pdev->dev, txdr->size, &txdr->dma,
dma              1002 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 	ew32(TDBAL, ((u64)txdr->dma & 0x00000000FFFFFFFF));
dma              1003 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 	ew32(TDBAH, ((u64)txdr->dma >> 32));
dma              1024 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		txdr->buffer_info[i].dma =
dma              1027 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		if (dma_mapping_error(&pdev->dev, txdr->buffer_info[i].dma)) {
dma              1031 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		tx_desc->buffer_addr = cpu_to_le64(txdr->buffer_info[i].dma);
dma              1052 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 	rxdr->desc = dma_alloc_coherent(&pdev->dev, rxdr->size, &rxdr->dma,
dma              1062 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 	ew32(RDBAL, ((u64)rxdr->dma & 0xFFFFFFFF));
dma              1063 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 	ew32(RDBAH, ((u64)rxdr->dma >> 32));
dma              1084 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		rxdr->buffer_info[i].dma =
dma              1088 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		if (dma_mapping_error(&pdev->dev, rxdr->buffer_info[i].dma)) {
dma              1092 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 		rx_desc->buffer_addr = cpu_to_le64(rxdr->buffer_info[i].dma);
dma              1405 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 						   txdr->buffer_info[k].dma,
dma              1418 drivers/net/ethernet/intel/e1000/e1000_ethtool.c 						rxdr->buffer_info[l].dma,
dma              1508 drivers/net/ethernet/intel/e1000/e1000_main.c 	txdr->desc = dma_alloc_coherent(&pdev->dev, txdr->size, &txdr->dma,
dma              1519 drivers/net/ethernet/intel/e1000/e1000_main.c 		dma_addr_t olddma = txdr->dma;
dma              1524 drivers/net/ethernet/intel/e1000/e1000_main.c 						&txdr->dma, GFP_KERNEL);
dma              1535 drivers/net/ethernet/intel/e1000/e1000_main.c 					  txdr->dma);
dma              1599 drivers/net/ethernet/intel/e1000/e1000_main.c 		tdba = adapter->tx_ring[0].dma;
dma              1700 drivers/net/ethernet/intel/e1000/e1000_main.c 	rxdr->desc = dma_alloc_coherent(&pdev->dev, rxdr->size, &rxdr->dma,
dma              1711 drivers/net/ethernet/intel/e1000/e1000_main.c 		dma_addr_t olddma = rxdr->dma;
dma              1716 drivers/net/ethernet/intel/e1000/e1000_main.c 						&rxdr->dma, GFP_KERNEL);
dma              1727 drivers/net/ethernet/intel/e1000/e1000_main.c 					  rxdr->dma);
dma              1883 drivers/net/ethernet/intel/e1000/e1000_main.c 		rdba = adapter->rx_ring[0].dma;
dma              1929 drivers/net/ethernet/intel/e1000/e1000_main.c 			  tx_ring->dma);
dma              1952 drivers/net/ethernet/intel/e1000/e1000_main.c 	if (buffer_info->dma) {
dma              1954 drivers/net/ethernet/intel/e1000/e1000_main.c 			dma_unmap_page(&adapter->pdev->dev, buffer_info->dma,
dma              1957 drivers/net/ethernet/intel/e1000/e1000_main.c 			dma_unmap_single(&adapter->pdev->dev, buffer_info->dma,
dma              1960 drivers/net/ethernet/intel/e1000/e1000_main.c 		buffer_info->dma = 0;
dma              2036 drivers/net/ethernet/intel/e1000/e1000_main.c 			  rx_ring->dma);
dma              2090 drivers/net/ethernet/intel/e1000/e1000_main.c 			if (buffer_info->dma)
dma              2091 drivers/net/ethernet/intel/e1000/e1000_main.c 				dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              2099 drivers/net/ethernet/intel/e1000/e1000_main.c 			if (buffer_info->dma)
dma              2100 drivers/net/ethernet/intel/e1000/e1000_main.c 				dma_unmap_page(&pdev->dev, buffer_info->dma,
dma              2109 drivers/net/ethernet/intel/e1000/e1000_main.c 		buffer_info->dma = 0;
dma              2874 drivers/net/ethernet/intel/e1000/e1000_main.c 		buffer_info->dma = dma_map_single(&pdev->dev,
dma              2877 drivers/net/ethernet/intel/e1000/e1000_main.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              2926 drivers/net/ethernet/intel/e1000/e1000_main.c 			buffer_info->dma = skb_frag_dma_map(&pdev->dev, frag,
dma              2928 drivers/net/ethernet/intel/e1000/e1000_main.c 			if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              2951 drivers/net/ethernet/intel/e1000/e1000_main.c 	buffer_info->dma = 0;
dma              3002 drivers/net/ethernet/intel/e1000/e1000_main.c 		tx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              3420 drivers/net/ethernet/intel/e1000/e1000_main.c 			(u64)buffer_info->dma, buffer_info->length,
dma              3459 drivers/net/ethernet/intel/e1000/e1000_main.c 			(u64)buffer_info->dma, buffer_info->rxbuf.data, type);
dma              4157 drivers/net/ethernet/intel/e1000/e1000_main.c 		dma_unmap_page(&pdev->dev, buffer_info->dma,
dma              4159 drivers/net/ethernet/intel/e1000/e1000_main.c 		buffer_info->dma = 0;
dma              4324 drivers/net/ethernet/intel/e1000/e1000_main.c 	dma_sync_single_for_cpu(&adapter->pdev->dev, buffer_info->dma,
dma              4383 drivers/net/ethernet/intel/e1000/e1000_main.c 			dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              4386 drivers/net/ethernet/intel/e1000/e1000_main.c 			buffer_info->dma = 0;
dma              4509 drivers/net/ethernet/intel/e1000/e1000_main.c 		if (!buffer_info->dma) {
dma              4510 drivers/net/ethernet/intel/e1000/e1000_main.c 			buffer_info->dma = dma_map_page(&pdev->dev,
dma              4514 drivers/net/ethernet/intel/e1000/e1000_main.c 			if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma              4517 drivers/net/ethernet/intel/e1000/e1000_main.c 				buffer_info->dma = 0;
dma              4524 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
dma              4602 drivers/net/ethernet/intel/e1000/e1000_main.c 		buffer_info->dma = dma_map_single(&pdev->dev,
dma              4606 drivers/net/ethernet/intel/e1000/e1000_main.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma              4608 drivers/net/ethernet/intel/e1000/e1000_main.c 			buffer_info->dma = 0;
dma              4619 drivers/net/ethernet/intel/e1000/e1000_main.c 					(void *)(unsigned long)buffer_info->dma,
dma              4623 drivers/net/ethernet/intel/e1000/e1000_main.c 			      (void *)(unsigned long)buffer_info->dma);
dma              4625 drivers/net/ethernet/intel/e1000/e1000_main.c 			dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              4631 drivers/net/ethernet/intel/e1000/e1000_main.c 			buffer_info->dma = 0;
dma              4639 drivers/net/ethernet/intel/e1000/e1000_main.c 		rx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
dma               121 drivers/net/ethernet/intel/e1000e/e1000.h 	u64 dma; /* must be u64 - written to hw */
dma               128 drivers/net/ethernet/intel/e1000e/e1000.h 	dma_addr_t dma;
dma               152 drivers/net/ethernet/intel/e1000e/e1000.h 	dma_addr_t dma;			/* phys address of ring    */
dma              1124 drivers/net/ethernet/intel/e1000e/ethtool.c 			if (buffer_info->dma)
dma              1126 drivers/net/ethernet/intel/e1000e/ethtool.c 						 buffer_info->dma,
dma              1137 drivers/net/ethernet/intel/e1000e/ethtool.c 			if (buffer_info->dma)
dma              1139 drivers/net/ethernet/intel/e1000e/ethtool.c 						 buffer_info->dma,
dma              1147 drivers/net/ethernet/intel/e1000e/ethtool.c 				  tx_ring->dma);
dma              1152 drivers/net/ethernet/intel/e1000e/ethtool.c 				  rx_ring->dma);
dma              1187 drivers/net/ethernet/intel/e1000e/ethtool.c 					   &tx_ring->dma, GFP_KERNEL);
dma              1195 drivers/net/ethernet/intel/e1000e/ethtool.c 	ew32(TDBAL(0), ((u64)tx_ring->dma & 0x00000000FFFFFFFF));
dma              1196 drivers/net/ethernet/intel/e1000e/ethtool.c 	ew32(TDBAH(0), ((u64)tx_ring->dma >> 32));
dma              1217 drivers/net/ethernet/intel/e1000e/ethtool.c 		tx_ring->buffer_info[i].dma =
dma              1221 drivers/net/ethernet/intel/e1000e/ethtool.c 				      tx_ring->buffer_info[i].dma)) {
dma              1225 drivers/net/ethernet/intel/e1000e/ethtool.c 		tx_desc->buffer_addr = cpu_to_le64(tx_ring->buffer_info[i].dma);
dma              1247 drivers/net/ethernet/intel/e1000e/ethtool.c 					   &rx_ring->dma, GFP_KERNEL);
dma              1258 drivers/net/ethernet/intel/e1000e/ethtool.c 	ew32(RDBAL(0), ((u64)rx_ring->dma & 0xFFFFFFFF));
dma              1259 drivers/net/ethernet/intel/e1000e/ethtool.c 	ew32(RDBAH(0), ((u64)rx_ring->dma >> 32));
dma              1281 drivers/net/ethernet/intel/e1000e/ethtool.c 		rx_ring->buffer_info[i].dma =
dma              1285 drivers/net/ethernet/intel/e1000e/ethtool.c 				      rx_ring->buffer_info[i].dma)) {
dma              1291 drivers/net/ethernet/intel/e1000e/ethtool.c 		    cpu_to_le64(rx_ring->buffer_info[i].dma);
dma              1660 drivers/net/ethernet/intel/e1000e/ethtool.c 						   buffer_info->dma,
dma              1677 drivers/net/ethernet/intel/e1000e/ethtool.c 						buffer_info->dma, 2048,
dma               248 drivers/net/ethernet/intel/e1000e/netdev.c 		(unsigned long long)buffer_info->dma,
dma               308 drivers/net/ethernet/intel/e1000e/netdev.c 			(unsigned long long)buffer_info->dma,
dma               391 drivers/net/ethernet/intel/e1000e/netdev.c 					(unsigned long long)buffer_info->dma,
dma               453 drivers/net/ethernet/intel/e1000e/netdev.c 					(unsigned long long)buffer_info->dma,
dma               675 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = dma_map_single(&pdev->dev, skb->data,
dma               678 drivers/net/ethernet/intel/e1000e/netdev.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma               685 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->read.buffer_addr = cpu_to_le64(buffer_info->dma);
dma               744 drivers/net/ethernet/intel/e1000e/netdev.c 				ps_page->dma = dma_map_page(&pdev->dev,
dma               749 drivers/net/ethernet/intel/e1000e/netdev.c 						      ps_page->dma)) {
dma               761 drivers/net/ethernet/intel/e1000e/netdev.c 			    cpu_to_le64(ps_page->dma);
dma               773 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = dma_map_single(&pdev->dev, skb->data,
dma               776 drivers/net/ethernet/intel/e1000e/netdev.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma               785 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->read.buffer_addr[0] = cpu_to_le64(buffer_info->dma);
dma               856 drivers/net/ethernet/intel/e1000e/netdev.c 		if (!buffer_info->dma) {
dma               857 drivers/net/ethernet/intel/e1000e/netdev.c 			buffer_info->dma = dma_map_page(&pdev->dev,
dma               861 drivers/net/ethernet/intel/e1000e/netdev.c 			if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma               868 drivers/net/ethernet/intel/e1000e/netdev.c 		rx_desc->read.buffer_addr = cpu_to_le64(buffer_info->dma);
dma               950 drivers/net/ethernet/intel/e1000e/netdev.c 		dma_unmap_single(&pdev->dev, buffer_info->dma,
dma               952 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = 0;
dma              1061 drivers/net/ethernet/intel/e1000e/netdev.c 	if (buffer_info->dma) {
dma              1063 drivers/net/ethernet/intel/e1000e/netdev.c 			dma_unmap_page(&adapter->pdev->dev, buffer_info->dma,
dma              1066 drivers/net/ethernet/intel/e1000e/netdev.c 			dma_unmap_single(&adapter->pdev->dev, buffer_info->dma,
dma              1068 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = 0;
dma              1343 drivers/net/ethernet/intel/e1000e/netdev.c 		dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              1345 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = 0;
dma              1398 drivers/net/ethernet/intel/e1000e/netdev.c 							ps_page->dma,
dma              1405 drivers/net/ethernet/intel/e1000e/netdev.c 							   ps_page->dma,
dma              1426 drivers/net/ethernet/intel/e1000e/netdev.c 			dma_unmap_page(&pdev->dev, ps_page->dma, PAGE_SIZE,
dma              1428 drivers/net/ethernet/intel/e1000e/netdev.c 			ps_page->dma = 0;
dma              1547 drivers/net/ethernet/intel/e1000e/netdev.c 		dma_unmap_page(&pdev->dev, buffer_info->dma, PAGE_SIZE,
dma              1549 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = 0;
dma              1684 drivers/net/ethernet/intel/e1000e/netdev.c 		if (buffer_info->dma) {
dma              1686 drivers/net/ethernet/intel/e1000e/netdev.c 				dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              1690 drivers/net/ethernet/intel/e1000e/netdev.c 				dma_unmap_page(&pdev->dev, buffer_info->dma,
dma              1693 drivers/net/ethernet/intel/e1000e/netdev.c 				dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              1696 drivers/net/ethernet/intel/e1000e/netdev.c 			buffer_info->dma = 0;
dma              1713 drivers/net/ethernet/intel/e1000e/netdev.c 			dma_unmap_page(&pdev->dev, ps_page->dma, PAGE_SIZE,
dma              1715 drivers/net/ethernet/intel/e1000e/netdev.c 			ps_page->dma = 0;
dma              2308 drivers/net/ethernet/intel/e1000e/netdev.c 	ring->desc = dma_alloc_coherent(&pdev->dev, ring->size, &ring->dma,
dma              2446 drivers/net/ethernet/intel/e1000e/netdev.c 			  tx_ring->dma);
dma              2471 drivers/net/ethernet/intel/e1000e/netdev.c 			  rx_ring->dma);
dma              2919 drivers/net/ethernet/intel/e1000e/netdev.c 	tdba = tx_ring->dma;
dma              3244 drivers/net/ethernet/intel/e1000e/netdev.c 	rdba = rx_ring->dma;
dma              3809 drivers/net/ethernet/intel/e1000e/netdev.c 	tx_desc->buffer_addr = tx_ring->dma;
dma              5574 drivers/net/ethernet/intel/e1000e/netdev.c 		buffer_info->dma = dma_map_single(&pdev->dev,
dma              5578 drivers/net/ethernet/intel/e1000e/netdev.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              5609 drivers/net/ethernet/intel/e1000e/netdev.c 			buffer_info->dma = skb_frag_dma_map(&pdev->dev, frag,
dma              5613 drivers/net/ethernet/intel/e1000e/netdev.c 			if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              5635 drivers/net/ethernet/intel/e1000e/netdev.c 	buffer_info->dma = 0;
dma              5690 drivers/net/ethernet/intel/e1000e/netdev.c 		tx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);
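The e1000/e1000e entries above repeat one cleanup pattern: remember whether a buffer was mapped with dma_map_single() or dma_map_page(), undo it with the matching unmap call, then clear the stored handle so the slot is not unmapped twice. A sketch of that pattern, with struct my_buffer standing in for the drivers' buffer_info:

/* Hedged sketch: unmap according to how the buffer was mapped. */
#include <linux/dma-mapping.h>
#include <linux/types.h>

struct my_buffer {
	dma_addr_t dma;
	unsigned int length;
	bool mapped_as_page;
};

static void my_unmap_buffer(struct device *dev, struct my_buffer *bi)
{
	if (!bi->dma)
		return;

	if (bi->mapped_as_page)
		dma_unmap_page(dev, bi->dma, bi->length, DMA_TO_DEVICE);
	else
		dma_unmap_single(dev, bi->dma, bi->length, DMA_TO_DEVICE);

	bi->dma = 0;	/* mark the slot as unmapped */
}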
dma                68 drivers/net/ethernet/intel/fm10k/fm10k.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma                73 drivers/net/ethernet/intel/fm10k/fm10k.h 	dma_addr_t dma;
dma               115 drivers/net/ethernet/intel/fm10k/fm10k.h 	dma_addr_t dma;			/* phys. address of descriptor ring */
dma                74 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	dma_addr_t dma;
dma                88 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	dma = dma_map_page(rx_ring->dev, page, 0, PAGE_SIZE, DMA_FROM_DEVICE);
dma                93 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma               100 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	bi->dma = dma;
dma               133 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		rx_desc->q.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma               194 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	dma_sync_single_range_for_device(rx_ring->dev, old_buff->dma,
dma               338 drivers/net/ethernet/intel/fm10k/fm10k_main.c 				      rx_buffer->dma,
dma               349 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		dma_unmap_page(rx_ring->dev, rx_buffer->dma,
dma               904 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			       dma_addr_t dma, unsigned int size, u8 desc_flags)
dma               911 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	tx_desc->buffer_addr = cpu_to_le64(dma);
dma               951 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	dma_addr_t dma;
dma               968 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	dma = dma_map_single(tx_ring->dev, data, size, DMA_TO_DEVICE);
dma               974 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma               979 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		dma_unmap_addr_set(tx_buffer, dma, dma);
dma               982 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			if (fm10k_tx_desc_push(tx_ring, tx_desc++, i++, dma,
dma               988 drivers/net/ethernet/intel/fm10k/fm10k_main.c 			dma += FM10K_MAX_DATA_PER_TXD;
dma               996 drivers/net/ethernet/intel/fm10k/fm10k_main.c 				       dma, size, flags)) {
dma              1004 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
dma              1013 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	if (fm10k_tx_desc_push(tx_ring, tx_desc, i++, dma, size, flags))
dma              1232 drivers/net/ethernet/intel/fm10k/fm10k_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              1254 drivers/net/ethernet/intel/fm10k/fm10k_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma                33 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 					   &tx_ring->dma, GFP_KERNEL);
dma               101 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 					   &rx_ring->dma, GFP_KERNEL);
dma               151 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 					 dma_unmap_addr(tx_buffer, dma),
dma               156 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 			       dma_unmap_addr(tx_buffer, dma),
dma               214 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 			  tx_ring->desc, tx_ring->dma);
dma               266 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 		dma_unmap_page(rx_ring->dev, buffer->dma,
dma               302 drivers/net/ethernet/intel/fm10k/fm10k_netdev.c 			  rx_ring->desc, rx_ring->dma);
dma               871 drivers/net/ethernet/intel/fm10k/fm10k_pci.c 	u64 tdba = ring->dma;
dma               980 drivers/net/ethernet/intel/fm10k/fm10k_pci.c 	u64 rdba = ring->dma;
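The fm10k (and later i40e) entries above keep the unmap address in a DEFINE_DMA_UNMAP_ADDR() field and go through dma_unmap_addr()/dma_unmap_addr_set(); these macros only occupy storage on configurations that need the address at unmap time and compile away elsewhere. A sketch of that bookkeeping, with struct my_tx_buffer and the helper names assumed:

/* Hedged sketch: store and retrieve unmap info via the dma_unmap_* macros. */
#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct my_tx_buffer {
	DEFINE_DMA_UNMAP_ADDR(dma);
	DEFINE_DMA_UNMAP_LEN(len);
};

static int my_tx_map(struct device *dev, struct my_tx_buffer *buf,
		     void *data, size_t size)
{
	dma_addr_t dma = dma_map_single(dev, data, size, DMA_TO_DEVICE);

	if (dma_mapping_error(dev, dma))
		return -ENOMEM;

	dma_unmap_addr_set(buf, dma, dma);
	dma_unmap_len_set(buf, len, size);
	return 0;
}

static void my_tx_unmap(struct device *dev, struct my_tx_buffer *buf)
{
	dma_unmap_single(dev, dma_unmap_addr(buf, dma),
			 dma_unmap_len(buf, len), DMA_TO_DEVICE);
}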
dma              3158 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.base = (ring->dma / 128);
dma              3166 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.head_wb_addr = ring->dma +
dma              3296 drivers/net/ethernet/intel/i40e/i40e_main.c 	rx_ctx.base = (ring->dma / 128);
dma              4100 drivers/net/ethernet/intel/i40e/i40e_main.c 				 dma_unmap_addr(tx_buf, dma),
dma                97 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma_addr_t dma;
dma               116 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma = dma_map_single(dev, raw_packet,
dma               118 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (dma_mapping_error(dev, dma))
dma               137 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma_unmap_addr_set(tx_buf, dma, dma);
dma               139 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	tx_desc->buffer_addr = cpu_to_le64(dma);
dma               613 drivers/net/ethernet/intel/i40e/i40e_txrx.c 					 dma_unmap_addr(tx_buffer, dma),
dma               618 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			       dma_unmap_addr(tx_buffer, dma),
dma               681 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				  tx_ring->desc, tx_ring->dma);
dma               820 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				 dma_unmap_addr(tx_buf, dma),
dma               845 drivers/net/ethernet/intel/i40e/i40e_txrx.c 					       dma_unmap_addr(tx_buf, dma),
dma              1218 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	new_buff->dma		= old_buff->dma;
dma              1321 drivers/net/ethernet/intel/i40e/i40e_txrx.c 					   &tx_ring->dma, GFP_KERNEL);
dma              1373 drivers/net/ethernet/intel/i40e/i40e_txrx.c 					      rx_bi->dma,
dma              1379 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		dma_unmap_page_attrs(rx_ring->dev, rx_bi->dma,
dma              1419 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				  rx_ring->desc, rx_ring->dma);
dma              1449 drivers/net/ethernet/intel/i40e/i40e_txrx.c 					   &rx_ring->dma, GFP_KERNEL);
dma              1522 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma_addr_t dma;
dma              1538 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0,
dma              1546 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma              1552 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	bi->dma = dma;
dma              1586 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma              1594 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma              1973 drivers/net/ethernet/intel/i40e/i40e_txrx.c 				      rx_buffer->dma,
dma              2135 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma              3357 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma_addr_t dma;
dma              3368 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma              3376 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma              3381 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		dma_unmap_addr_set(tx_bi, dma, dma);
dma              3384 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		max_data += -dma & (I40E_MAX_READ_REQ_SIZE - 1);
dma              3385 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		tx_desc->buffer_addr = cpu_to_le64(dma);
dma              3401 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			dma += max_data;
dma              3405 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			tx_desc->buffer_addr = cpu_to_le64(dma);
dma              3426 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
dma              3511 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma_addr_t dma;
dma              3517 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma = dma_map_single(xdp_ring->dev, data, size, DMA_TO_DEVICE);
dma              3518 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (dma_mapping_error(xdp_ring->dev, dma))
dma              3528 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma_unmap_addr_set(tx_bi, dma, dma);
dma              3531 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	tx_desc->buffer_addr = cpu_to_le64(dma);
dma               292 drivers/net/ethernet/intel/i40e/i40e_txrx.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma               298 drivers/net/ethernet/intel/i40e/i40e_txrx.h 	dma_addr_t dma;
dma               403 drivers/net/ethernet/intel/i40e/i40e_txrx.h 	dma_addr_t dma;			/* physical address of ring */
dma                24 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	dma_addr_t dma;
dma                28 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma = dma_map_page_attrs(dev, umem->pgs[i], 0, PAGE_SIZE,
dma                30 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		if (dma_mapping_error(dev, dma))
dma                33 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		umem->pages[i].dma = dma;
dma                40 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma_unmap_page_attrs(dev, umem->pages[i].dma, PAGE_SIZE,
dma                42 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		umem->pages[i].dma = 0;
dma                62 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma_unmap_page_attrs(dev, umem->pages[i].dma, PAGE_SIZE,
dma                65 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		umem->pages[i].dma = 0;
dma               264 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma = xdp_umem_get_dma(umem, handle);
dma               265 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma += hr;
dma               301 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma = xdp_umem_get_dma(umem, handle);
dma               302 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma += hr;
dma               331 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma, 0,
dma               335 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma);
dma               409 drivers/net/ethernet/intel/i40e/i40e_xsk.c 				      bi->dma, 0,
dma               435 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	new_bi->dma = old_bi->dma;
dma               466 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma = xdp_umem_get_dma(rx_ring->xsk_umem, handle);
dma               467 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma += hr;
dma               653 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	dma_addr_t dma;
dma               665 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
dma               667 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma_sync_single_for_device(xdp_ring->dev, dma, desc.len,
dma               674 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		tx_desc->buffer_addr = cpu_to_le64(dma);
dma               707 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			 dma_unmap_addr(tx_bi, dma),
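Across the i40e entries (and the iavf/igb/igc/ixgbe entries below) the Tx map step follows one pattern: dma_map_single() on skb->data, a dma_mapping_error() check, dma_unmap_addr_set()/dma_unmap_len_set() so the completion path can unmap later, and the bus address written into the descriptor as cpu_to_le64(dma); page fragments go through skb_frag_dma_map() instead. A hedged sketch of the head-of-packet step; struct my_tx_buf and the commented-out my_write_desc() are illustrative placeholders, not driver or kernel symbols:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/skbuff.h>

/* Illustrative software bookkeeping for one Tx buffer. */
struct my_tx_buf {
	DEFINE_DMA_UNMAP_ADDR(dma);
	DEFINE_DMA_UNMAP_LEN(len);
};

static int my_map_tx_head(struct device *dev, struct sk_buff *skb,
			  struct my_tx_buf *buf)
{
	unsigned int size = skb_headlen(skb);
	dma_addr_t dma;

	dma = dma_map_single(dev, skb->data, size, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma))
		return -ENOMEM;

	/* Record address and length so the completion path can call
	 * dma_unmap_single(dev, dma_unmap_addr(buf, dma),
	 *                  dma_unmap_len(buf, len), DMA_TO_DEVICE);
	 * fragments would be mapped with skb_frag_dma_map() instead.
	 */
	dma_unmap_addr_set(buf, dma, dma);
	dma_unmap_len_set(buf, len, size);

	/* The hardware descriptor then takes cpu_to_le64(dma), e.g.
	 * my_write_desc(cpu_to_le64(dma), size);  -- placeholder only.
	 */
	return 0;
}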
dma                37 drivers/net/ethernet/intel/iavf/iavf_txrx.c 					 dma_unmap_addr(tx_buffer, dma),
dma                42 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			       dma_unmap_addr(tx_buffer, dma),
dma               100 drivers/net/ethernet/intel/iavf/iavf_txrx.c 				  tx_ring->desc, tx_ring->dma);
dma               231 drivers/net/ethernet/intel/iavf/iavf_txrx.c 				 dma_unmap_addr(tx_buf, dma),
dma               256 drivers/net/ethernet/intel/iavf/iavf_txrx.c 					       dma_unmap_addr(tx_buf, dma),
dma               633 drivers/net/ethernet/intel/iavf/iavf_txrx.c 					   &tx_ring->dma, GFP_KERNEL);
dma               680 drivers/net/ethernet/intel/iavf/iavf_txrx.c 					      rx_bi->dma,
dma               686 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		dma_unmap_page_attrs(rx_ring->dev, rx_bi->dma,
dma               722 drivers/net/ethernet/intel/iavf/iavf_txrx.c 				  rx_ring->desc, rx_ring->dma);
dma               751 drivers/net/ethernet/intel/iavf/iavf_txrx.c 					   &rx_ring->dma, GFP_KERNEL);
dma               814 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	dma_addr_t dma;
dma               830 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0,
dma               838 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma               844 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	bi->dma = dma;
dma               897 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma               905 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma              1138 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	new_buff->dma		= old_buff->dma;
dma              1274 drivers/net/ethernet/intel/iavf/iavf_txrx.c 				      rx_buffer->dma,
dma              1422 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma              2277 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	dma_addr_t dma;
dma              2287 drivers/net/ethernet/intel/iavf/iavf_txrx.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma              2295 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma              2300 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		dma_unmap_addr_set(tx_bi, dma, dma);
dma              2303 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		max_data += -dma & (IAVF_MAX_READ_REQ_SIZE - 1);
dma              2304 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		tx_desc->buffer_addr = cpu_to_le64(dma);
dma              2319 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			dma += max_data;
dma              2323 drivers/net/ethernet/intel/iavf/iavf_txrx.c 			tx_desc->buffer_addr = cpu_to_le64(dma);
dma              2343 drivers/net/ethernet/intel/iavf/iavf_txrx.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
dma               269 drivers/net/ethernet/intel/iavf/iavf_txrx.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma               275 drivers/net/ethernet/intel/iavf/iavf_txrx.h 	dma_addr_t dma;
dma               375 drivers/net/ethernet/intel/iavf/iavf_txrx.h 	dma_addr_t dma;			/* physical address of ring */
dma               275 drivers/net/ethernet/intel/iavf/iavf_virtchnl.c 		vqpi->txq.dma_ring_addr = adapter->tx_rings[i].dma;
dma               279 drivers/net/ethernet/intel/iavf/iavf_virtchnl.c 		vqpi->rxq.dma_ring_addr = adapter->rx_rings[i].dma;
dma               559 drivers/net/ethernet/intel/ice/ice_ethtool.c 	dma_addr_t dma;
dma               565 drivers/net/ethernet/intel/ice/ice_ethtool.c 	dma = dma_map_single(tx_ring->dev, data, size, DMA_TO_DEVICE);
dma               566 drivers/net/ethernet/intel/ice/ice_ethtool.c 	if (dma_mapping_error(tx_ring->dev, dma))
dma               569 drivers/net/ethernet/intel/ice/ice_ethtool.c 	tx_desc->buf_addr = cpu_to_le64(dma);
dma               595 drivers/net/ethernet/intel/ice/ice_ethtool.c 	dma_unmap_single(tx_ring->dev, dma, size, DMA_TO_DEVICE);
dma                30 drivers/net/ethernet/intel/ice/ice_lib.c 	rlan_ctx.base = ring->dma >> 7;
dma               121 drivers/net/ethernet/intel/ice/ice_lib.c 	tlan_ctx->base = ring->dma >> ICE_TLAN_CTX_BASE_S;
dma                25 drivers/net/ethernet/intel/ice/ice_txrx.c 					 dma_unmap_addr(tx_buf, dma),
dma                30 drivers/net/ethernet/intel/ice/ice_txrx.c 			       dma_unmap_addr(tx_buf, dma),
dma                91 drivers/net/ethernet/intel/ice/ice_txrx.c 				   tx_ring->desc, tx_ring->dma);
dma               144 drivers/net/ethernet/intel/ice/ice_txrx.c 				 dma_unmap_addr(tx_buf, dma),
dma               166 drivers/net/ethernet/intel/ice/ice_txrx.c 					       dma_unmap_addr(tx_buf, dma),
dma               244 drivers/net/ethernet/intel/ice/ice_txrx.c 	tx_ring->desc = dmam_alloc_coherent(dev, tx_ring->size, &tx_ring->dma,
dma               290 drivers/net/ethernet/intel/ice/ice_txrx.c 		dma_sync_single_range_for_cpu(dev, rx_buf->dma,
dma               295 drivers/net/ethernet/intel/ice/ice_txrx.c 		dma_unmap_page_attrs(dev, rx_buf->dma, PAGE_SIZE,
dma               327 drivers/net/ethernet/intel/ice/ice_txrx.c 				   rx_ring->desc, rx_ring->dma);
dma               356 drivers/net/ethernet/intel/ice/ice_txrx.c 	rx_ring->desc = dmam_alloc_coherent(dev, rx_ring->size, &rx_ring->dma,
dma               417 drivers/net/ethernet/intel/ice/ice_txrx.c 	dma_addr_t dma;
dma               433 drivers/net/ethernet/intel/ice/ice_txrx.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0, PAGE_SIZE,
dma               439 drivers/net/ethernet/intel/ice/ice_txrx.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma               445 drivers/net/ethernet/intel/ice/ice_txrx.c 	bi->dma = dma;
dma               487 drivers/net/ethernet/intel/ice/ice_txrx.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma               495 drivers/net/ethernet/intel/ice/ice_txrx.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma               643 drivers/net/ethernet/intel/ice/ice_txrx.c 	new_buf->dma = old_buf->dma;
dma               671 drivers/net/ethernet/intel/ice/ice_txrx.c 	dma_sync_single_range_for_cpu(rx_ring->dev, rx_buf->dma,
dma               762 drivers/net/ethernet/intel/ice/ice_txrx.c 		dma_unmap_page_attrs(rx_ring->dev, rx_buf->dma, PAGE_SIZE,
dma              1600 drivers/net/ethernet/intel/ice/ice_txrx.c 	dma_addr_t dma;
dma              1618 drivers/net/ethernet/intel/ice/ice_txrx.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma              1625 drivers/net/ethernet/intel/ice/ice_txrx.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma              1630 drivers/net/ethernet/intel/ice/ice_txrx.c 		dma_unmap_addr_set(tx_buf, dma, dma);
dma              1633 drivers/net/ethernet/intel/ice/ice_txrx.c 		max_data += -dma & (ICE_MAX_READ_REQ_SIZE - 1);
dma              1634 drivers/net/ethernet/intel/ice/ice_txrx.c 		tx_desc->buf_addr = cpu_to_le64(dma);
dma              1651 drivers/net/ethernet/intel/ice/ice_txrx.c 			dma += max_data;
dma              1655 drivers/net/ethernet/intel/ice/ice_txrx.c 			tx_desc->buf_addr = cpu_to_le64(dma);
dma              1675 drivers/net/ethernet/intel/ice/ice_txrx.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
dma                62 drivers/net/ethernet/intel/ice/ice_txrx.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma                78 drivers/net/ethernet/intel/ice/ice_txrx.h 	dma_addr_t dma;
dma               204 drivers/net/ethernet/intel/ice/ice_txrx.h 	dma_addr_t dma;			/* physical address of ring */
dma              2280 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 			vsi->tx_rings[i]->dma = qpi->txq.dma_ring_addr;
dma              2287 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 			vsi->rx_rings[i]->dma = qpi->rxq.dma_ring_addr;
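The ice entries at ice_txrx.c 244/356 use dmam_alloc_coherent() rather than dma_alloc_coherent(), tying the ring memory to the device's managed-resource (devres) lifetime so no explicit free is needed on teardown. A minimal sketch, with caller-supplied desc/size/dma parameters standing in for the ring fields used earlier:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

/* Managed variant: devres releases the memory automatically when the
 * driver detaches, so there is no matching free call on teardown.
 */
static int my_ring_alloc_managed(struct device *dev, void **desc,
				 size_t size, dma_addr_t *dma)
{
	*desc = dmam_alloc_coherent(dev, size, dma, GFP_KERNEL);
	return *desc ? 0 : -ENOMEM;
}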
dma               208 drivers/net/ethernet/intel/igb/igb.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma               214 drivers/net/ethernet/intel/igb/igb.h 	dma_addr_t dma;
dma               259 drivers/net/ethernet/intel/igb/igb.h 	dma_addr_t dma;			/* phys address of the ring */
dma              1821 drivers/net/ethernet/intel/igb/igb_ethtool.c 					rx_buffer_info->dma,
dma              1831 drivers/net/ethernet/intel/igb/igb_ethtool.c 					   rx_buffer_info->dma,
dma              1843 drivers/net/ethernet/intel/igb/igb_ethtool.c 				 dma_unmap_addr(tx_buffer_info, dma),
dma               402 drivers/net/ethernet/intel/igb/igb_main.c 			(u64)dma_unmap_addr(buffer_info, dma),
dma               451 drivers/net/ethernet/intel/igb/igb_main.c 				(u64)dma_unmap_addr(buffer_info, dma),
dma               538 drivers/net/ethernet/intel/igb/igb_main.c 					(u64)buffer_info->dma,
dma               542 drivers/net/ethernet/intel/igb/igb_main.c 				    buffer_info->dma && buffer_info->page) {
dma              4054 drivers/net/ethernet/intel/igb/igb_main.c 					   &tx_ring->dma, GFP_KERNEL);
dma              4134 drivers/net/ethernet/intel/igb/igb_main.c 	u64 tdba = ring->dma;
dma              4203 drivers/net/ethernet/intel/igb/igb_main.c 					   &rx_ring->dma, GFP_KERNEL);
dma              4480 drivers/net/ethernet/intel/igb/igb_main.c 	u64 rdba = ring->dma;
dma              4596 drivers/net/ethernet/intel/igb/igb_main.c 			  tx_ring->desc, tx_ring->dma);
dma              4633 drivers/net/ethernet/intel/igb/igb_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              4655 drivers/net/ethernet/intel/igb/igb_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma              4708 drivers/net/ethernet/intel/igb/igb_main.c 			  rx_ring->desc, rx_ring->dma);
dma              4747 drivers/net/ethernet/intel/igb/igb_main.c 					      buffer_info->dma,
dma              4754 drivers/net/ethernet/intel/igb/igb_main.c 				     buffer_info->dma,
dma              5923 drivers/net/ethernet/intel/igb/igb_main.c 	dma_addr_t dma;
dma              5936 drivers/net/ethernet/intel/igb/igb_main.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma              5941 drivers/net/ethernet/intel/igb/igb_main.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma              5946 drivers/net/ethernet/intel/igb/igb_main.c 		dma_unmap_addr_set(tx_buffer, dma, dma);
dma              5948 drivers/net/ethernet/intel/igb/igb_main.c 		tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              5962 drivers/net/ethernet/intel/igb/igb_main.c 			dma += IGB_MAX_DATA_PER_TXD;
dma              5965 drivers/net/ethernet/intel/igb/igb_main.c 			tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              5984 drivers/net/ethernet/intel/igb/igb_main.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0,
dma              6035 drivers/net/ethernet/intel/igb/igb_main.c 				       dma_unmap_addr(tx_buffer, dma),
dma              6047 drivers/net/ethernet/intel/igb/igb_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              7788 drivers/net/ethernet/intel/igb/igb_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              7809 drivers/net/ethernet/intel/igb/igb_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma              7932 drivers/net/ethernet/intel/igb/igb_main.c 	new_buff->dma		= old_buff->dma;
dma              8272 drivers/net/ethernet/intel/igb/igb_main.c 				      rx_buffer->dma,
dma              8292 drivers/net/ethernet/intel/igb/igb_main.c 		dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma              8403 drivers/net/ethernet/intel/igb/igb_main.c 	dma_addr_t dma;
dma              8417 drivers/net/ethernet/intel/igb/igb_main.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0,
dma              8425 drivers/net/ethernet/intel/igb/igb_main.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma              8432 drivers/net/ethernet/intel/igb/igb_main.c 	bi->dma = dma;
dma              8466 drivers/net/ethernet/intel/igb/igb_main.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma              8473 drivers/net/ethernet/intel/igb/igb_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
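The Rx refill entries in the Intel drivers above and below map a whole page once with dma_map_page_attrs(), then only dma_sync_single_range_for_device() the active region each time the buffer is posted, and point the descriptor at bi->dma + bi->page_offset. A hedged sketch; struct my_rx_buf is a simplified stand-in (the real buffer_info structs carry more reuse state), and the direction/attrs are illustrative:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/* Simplified Rx buffer bookkeeping. */
struct my_rx_buf {
	struct page *page;
	dma_addr_t dma;
	unsigned int page_offset;
};

static int my_alloc_mapped_page(struct device *dev, struct my_rx_buf *bi)
{
	struct page *page = alloc_page(GFP_ATOMIC);
	dma_addr_t dma;

	if (!page)
		return -ENOMEM;

	/* Map the whole page once; later reposts only sync the region
	 * the hardware may write (attrs chosen for illustration).
	 */
	dma = dma_map_page_attrs(dev, page, 0, PAGE_SIZE, DMA_FROM_DEVICE,
				 DMA_ATTR_WEAK_ORDERING);
	if (dma_mapping_error(dev, dma)) {
		__free_page(page);
		return -ENOMEM;
	}

	bi->page = page;
	bi->dma = dma;
	bi->page_offset = 0;
	return 0;
}

static void my_post_rx_buf(struct device *dev, struct my_rx_buf *bi,
			   unsigned int buf_len)
{
	/* Hand the buffer region back to the device; the descriptor then
	 * gets cpu_to_le64(bi->dma + bi->page_offset).
	 */
	dma_sync_single_range_for_device(dev, bi->dma, bi->page_offset,
					 buf_len, DMA_FROM_DEVICE);
}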
dma               100 drivers/net/ethernet/intel/igbvf/igbvf.h 	dma_addr_t dma;
dma               128 drivers/net/ethernet/intel/igbvf/igbvf.h 	dma_addr_t dma;		/* phys address of ring    */
dma               189 drivers/net/ethernet/intel/igbvf/netdev.c 			buffer_info->dma = dma_map_single(&pdev->dev, skb->data,
dma               192 drivers/net/ethernet/intel/igbvf/netdev.c 			if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma               205 drivers/net/ethernet/intel/igbvf/netdev.c 			rx_desc->read.hdr_addr = cpu_to_le64(buffer_info->dma);
dma               207 drivers/net/ethernet/intel/igbvf/netdev.c 			rx_desc->read.pkt_addr = cpu_to_le64(buffer_info->dma);
dma               288 drivers/net/ethernet/intel/igbvf/netdev.c 			dma_unmap_single(&pdev->dev, buffer_info->dma,
dma               291 drivers/net/ethernet/intel/igbvf/netdev.c 			buffer_info->dma = 0;
dma               297 drivers/net/ethernet/intel/igbvf/netdev.c 			dma_unmap_single(&pdev->dev, buffer_info->dma,
dma               300 drivers/net/ethernet/intel/igbvf/netdev.c 			buffer_info->dma = 0;
dma               335 drivers/net/ethernet/intel/igbvf/netdev.c 			buffer_info->dma = next_buffer->dma;
dma               337 drivers/net/ethernet/intel/igbvf/netdev.c 			next_buffer->dma = 0;
dma               388 drivers/net/ethernet/intel/igbvf/netdev.c 	if (buffer_info->dma) {
dma               391 drivers/net/ethernet/intel/igbvf/netdev.c 				       buffer_info->dma,
dma               396 drivers/net/ethernet/intel/igbvf/netdev.c 					 buffer_info->dma,
dma               399 drivers/net/ethernet/intel/igbvf/netdev.c 		buffer_info->dma = 0;
dma               430 drivers/net/ethernet/intel/igbvf/netdev.c 					   &tx_ring->dma, GFP_KERNEL);
dma               470 drivers/net/ethernet/intel/igbvf/netdev.c 					   &rx_ring->dma, GFP_KERNEL);
dma               538 drivers/net/ethernet/intel/igbvf/netdev.c 			  tx_ring->dma);
dma               561 drivers/net/ethernet/intel/igbvf/netdev.c 		if (buffer_info->dma) {
dma               563 drivers/net/ethernet/intel/igbvf/netdev.c 				dma_unmap_single(&pdev->dev, buffer_info->dma,
dma               567 drivers/net/ethernet/intel/igbvf/netdev.c 				dma_unmap_single(&pdev->dev, buffer_info->dma,
dma               571 drivers/net/ethernet/intel/igbvf/netdev.c 			buffer_info->dma = 0;
dma               622 drivers/net/ethernet/intel/igbvf/netdev.c 			  rx_ring->dma);
dma              1296 drivers/net/ethernet/intel/igbvf/netdev.c 	tdba = tx_ring->dma;
dma              1378 drivers/net/ethernet/intel/igbvf/netdev.c 	rdba = rx_ring->dma;
dma              1994 drivers/net/ethernet/intel/igbvf/netdev.c 	buffer_info->dma = 0;
dma              2171 drivers/net/ethernet/intel/igbvf/netdev.c 	buffer_info->dma = dma_map_single(&pdev->dev, skb->data, len,
dma              2173 drivers/net/ethernet/intel/igbvf/netdev.c 	if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              2192 drivers/net/ethernet/intel/igbvf/netdev.c 		buffer_info->dma = skb_frag_dma_map(&pdev->dev, frag, 0, len,
dma              2194 drivers/net/ethernet/intel/igbvf/netdev.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              2206 drivers/net/ethernet/intel/igbvf/netdev.c 	buffer_info->dma = 0;
dma              2262 drivers/net/ethernet/intel/igbvf/netdev.c 		tx_desc->read.buffer_addr = cpu_to_le64(buffer_info->dma);
dma               191 drivers/net/ethernet/intel/igc/igc.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma               197 drivers/net/ethernet/intel/igc/igc.h 	dma_addr_t dma;
dma               254 drivers/net/ethernet/intel/igc/igc.h 	dma_addr_t dma;                 /* phys address of the ring */
dma               196 drivers/net/ethernet/intel/igc/igc_main.c 			  tx_ring->desc, tx_ring->dma);
dma               232 drivers/net/ethernet/intel/igc/igc_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma               254 drivers/net/ethernet/intel/igc/igc_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma               310 drivers/net/ethernet/intel/igc/igc_main.c 					   &tx_ring->dma, GFP_KERNEL);
dma               371 drivers/net/ethernet/intel/igc/igc_main.c 					      buffer_info->dma,
dma               378 drivers/net/ethernet/intel/igc/igc_main.c 				     buffer_info->dma,
dma               426 drivers/net/ethernet/intel/igc/igc_main.c 			  rx_ring->desc, rx_ring->dma);
dma               468 drivers/net/ethernet/intel/igc/igc_main.c 					   &rx_ring->dma, GFP_KERNEL);
dma               527 drivers/net/ethernet/intel/igc/igc_main.c 	u64 rdba = ring->dma;
dma               605 drivers/net/ethernet/intel/igc/igc_main.c 	u64 tdba = ring->dma;
dma               962 drivers/net/ethernet/intel/igc/igc_main.c 	dma_addr_t dma;
dma               972 drivers/net/ethernet/intel/igc/igc_main.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma               977 drivers/net/ethernet/intel/igc/igc_main.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma               982 drivers/net/ethernet/intel/igc/igc_main.c 		dma_unmap_addr_set(tx_buffer, dma, dma);
dma               984 drivers/net/ethernet/intel/igc/igc_main.c 		tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma               998 drivers/net/ethernet/intel/igc/igc_main.c 			dma += IGC_MAX_DATA_PER_TXD;
dma              1001 drivers/net/ethernet/intel/igc/igc_main.c 			tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              1020 drivers/net/ethernet/intel/igc/igc_main.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0,
dma              1071 drivers/net/ethernet/intel/igc/igc_main.c 				       dma_unmap_addr(tx_buffer, dma),
dma              1083 drivers/net/ethernet/intel/igc/igc_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              1207 drivers/net/ethernet/intel/igc/igc_main.c 				      rx_buffer->dma,
dma              1360 drivers/net/ethernet/intel/igc/igc_main.c 	new_buff->dma		= old_buff->dma;
dma              1477 drivers/net/ethernet/intel/igc/igc_main.c 		dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma              1514 drivers/net/ethernet/intel/igc/igc_main.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma              1521 drivers/net/ethernet/intel/igc/igc_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma              1657 drivers/net/ethernet/intel/igc/igc_main.c 	dma_addr_t dma;
dma              1671 drivers/net/ethernet/intel/igc/igc_main.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0,
dma              1679 drivers/net/ethernet/intel/igc/igc_main.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma              1686 drivers/net/ethernet/intel/igc/igc_main.c 	bi->dma = dma;
dma              1744 drivers/net/ethernet/intel/igc/igc_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              1765 drivers/net/ethernet/intel/igc/igc_main.c 					       dma_unmap_addr(tx_buffer, dma),
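The Tx cleanup entries (e.g. igc_main.c 232/254/1744/1765 above, and the matching lines in the other drivers) undo the mapping recorded at transmit time: dma_unmap_single() for the head, dma_unmap_page() for fragments, with the saved address and length read back via dma_unmap_addr()/dma_unmap_len(). A sketch of that release step, reusing the hypothetical my_tx_buf from the earlier Tx example:

#include <linux/dma-mapping.h>
#include <linux/types.h>

/* Assumes the map path saved address/length with dma_unmap_addr_set()
 * and dma_unmap_len_set(), as in the earlier Tx sketch.
 */
static void my_unmap_tx_buf(struct device *dev, struct my_tx_buf *buf,
			    bool is_frag)
{
	if (!dma_unmap_len(buf, len))
		return;

	if (is_frag)
		dma_unmap_page(dev, dma_unmap_addr(buf, dma),
			       dma_unmap_len(buf, len), DMA_TO_DEVICE);
	else
		dma_unmap_single(dev, dma_unmap_addr(buf, dma),
				 dma_unmap_len(buf, len), DMA_TO_DEVICE);

	dma_unmap_len_set(buf, len, 0);
}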
dma                77 drivers/net/ethernet/intel/ixgb/ixgb.h 	dma_addr_t dma;
dma                88 drivers/net/ethernet/intel/ixgb/ixgb.h 	dma_addr_t dma;
dma               683 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	txdr->desc = dma_alloc_coherent(&pdev->dev, txdr->size, &txdr->dma,
dma               706 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	u64 tdba = adapter->tx_ring.dma;
dma               766 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	rxdr->desc = dma_alloc_coherent(&pdev->dev, rxdr->size, &rxdr->dma,
dma               823 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	u64 rdba = adapter->rx_ring.dma;
dma               886 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			  adapter->tx_ring.desc, adapter->tx_ring.dma);
dma               895 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	if (buffer_info->dma) {
dma               897 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			dma_unmap_page(&adapter->pdev->dev, buffer_info->dma,
dma               900 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			dma_unmap_single(&adapter->pdev->dev, buffer_info->dma,
dma               902 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		buffer_info->dma = 0;
dma               968 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			  rx_ring->dma);
dma               991 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		if (buffer_info->dma) {
dma               993 drivers/net/ethernet/intel/ixgb/ixgb_main.c 					 buffer_info->dma,
dma               996 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			buffer_info->dma = 0;
dma              1219 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		WARN_ON(buffer_info->dma != 0);
dma              1263 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		WARN_ON(buffer_info->dma != 0);
dma              1313 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		WARN_ON(buffer_info->dma != 0);
dma              1316 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		buffer_info->dma = dma_map_single(&pdev->dev,
dma              1319 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              1355 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			buffer_info->dma =
dma              1358 drivers/net/ethernet/intel/ixgb/ixgb_main.c 			if (dma_mapping_error(&pdev->dev, buffer_info->dma))
dma              1374 drivers/net/ethernet/intel/ixgb/ixgb_main.c 	buffer_info->dma = 0;
dma              1416 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		tx_desc->buff_addr = cpu_to_le64(buffer_info->dma);
dma              1981 drivers/net/ethernet/intel/ixgb/ixgb_main.c 				 buffer_info->dma,
dma              1984 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		buffer_info->dma = 0;
dma              2087 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		buffer_info->dma = dma_map_single(&pdev->dev,
dma              2091 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		if (dma_mapping_error(&pdev->dev, buffer_info->dma)) {
dma              2097 drivers/net/ethernet/intel/ixgb/ixgb_main.c 		rx_desc->buff_addr = cpu_to_le64(buffer_info->dma);
dma               221 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma               228 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	dma_addr_t dma;
dma               321 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	dma_addr_t dma;			/* phys. address of descriptor ring */
dma               820 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	dma_addr_t dma;
dma              1916 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 				 dma_unmap_addr(tx_buffer, dma),
dma              1935 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 					rx_buffer->dma,
dma              1947 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 					   rx_buffer->dma,
dma               755 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	dma_addr_t dma;
dma               767 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	dma = dma_map_single(dev, buffer, IXGBE_FCBUFF_MIN, DMA_FROM_DEVICE);
dma               768 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	if (dma_mapping_error(dev, dma)) {
dma               775 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	fcoe->extra_ddp_buffer_dma = dma;
dma               557 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		(u64)dma_unmap_addr(tx_buffer, dma),
dma               692 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					(u64)dma_unmap_addr(tx_buffer, dma),
dma               811 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					(u64)rx_buffer_info->dma,
dma               816 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				    rx_buffer_info->dma) {
dma              1160 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              1181 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma              1534 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma_addr_t dma;
dma              1548 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0,
dma              1557 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma              1564 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	bi->dma = dma;
dma              1601 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma              1609 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma              1833 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					      IXGBE_CB(skb)->dma,
dma              1841 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					      IXGBE_CB(skb)->dma,
dma              1849 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		dma_unmap_page_attrs(rx_ring->dev, IXGBE_CB(skb)->dma,
dma              1939 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	new_buff->dma		= old_buff->dma;
dma              2048 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				      rx_buffer->dma,
dma              2066 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (!IS_ERR(skb) && IXGBE_CB(skb)->dma == rx_buffer->dma) {
dma              2071 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma              2127 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			IXGBE_CB(skb)->dma = rx_buffer->dma;
dma              2184 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		IXGBE_CB(skb)->dma = rx_buffer->dma;
dma              3481 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	u64 tdba = ring->dma;
dma              4070 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	u64 rdba = ring->dma;
dma              5304 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 						     IXGBE_CB(skb)->dma,
dma              5315 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					      rx_buffer->dma,
dma              5321 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma              5997 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              6019 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma              6468 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					   &tx_ring->dma,
dma              6473 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 						   &tx_ring->dma, GFP_KERNEL);
dma              6562 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 					   &rx_ring->dma,
dma              6567 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 						   &rx_ring->dma, GFP_KERNEL);
dma              6642 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			  tx_ring->desc, tx_ring->dma);
dma              6685 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			  rx_ring->desc, rx_ring->dma);
dma              8208 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma_addr_t dma;
dma              8232 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma              8237 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma              8242 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		dma_unmap_addr_set(tx_buffer, dma, dma);
dma              8244 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              8258 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			dma += IXGBE_MAX_DATA_PER_TXD;
dma              8261 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              8284 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
dma              8335 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				       dma_unmap_addr(tx_buffer, dma),
dma              8555 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma_addr_t dma;
dma              8563 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma = dma_map_single(ring->dev, xdpf->data, len, DMA_TO_DEVICE);
dma              8564 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (dma_mapping_error(ring->dev, dma))
dma              8577 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	dma_unmap_addr_set(tx_buffer, dma, dma);
dma              8580 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma                28 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	dma_addr_t dma;
dma                31 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma = dma_map_page_attrs(dev, umem->pgs[i], 0, PAGE_SIZE,
dma                33 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (dma_mapping_error(dev, dma))
dma                36 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		umem->pages[i].dma = dma;
dma                43 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma_unmap_page_attrs(dev, umem->pages[i].dma, PAGE_SIZE,
dma                45 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		umem->pages[i].dma = 0;
dma                58 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma_unmap_page_attrs(dev, umem->pages[i].dma, PAGE_SIZE,
dma                61 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		umem->pages[i].dma = 0;
dma               199 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 				      bi->dma, 0,
dma               218 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	nbi->dma = obi->dma;
dma               245 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma = xdp_umem_get_dma(rx_ring->xsk_umem, handle);
dma               246 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma += hr;
dma               272 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma = xdp_umem_get_dma(umem, handle);
dma               273 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma += hr;
dma               299 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma = xdp_umem_get_dma(umem, handle);
dma               300 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma += hr;
dma               336 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma               344 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma);
dma               582 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	dma_addr_t dma;
dma               595 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
dma               597 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma_sync_single_for_device(xdp_ring->dev, dma, desc.len,
dma               606 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma               635 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 			 dma_unmap_addr(tx_bi, dma),
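The *_xsk.c entries (i40e_xsk.c 24–65, ixgbe_xsk.c 28–61) map every page of an AF_XDP umem up front, store the result in umem->pages[i].dma, and unwind the already-mapped pages if a mapping fails. A hedged sketch of that loop over a hypothetical struct my_umem; the real xdp_umem layout, direction and attrs differ:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/types.h>

/* Hypothetical container: npgs pages to map, dma[] to fill in. */
struct my_umem {
	struct page **pgs;
	dma_addr_t *dma;
	u32 npgs;
};

static int my_umem_dma_map(struct device *dev, struct my_umem *umem)
{
	u32 i;

	for (i = 0; i < umem->npgs; i++) {
		dma_addr_t dma = dma_map_page_attrs(dev, umem->pgs[i], 0,
						    PAGE_SIZE,
						    DMA_BIDIRECTIONAL,
						    DMA_ATTR_SKIP_CPU_SYNC);
		if (dma_mapping_error(dev, dma))
			goto unwind;
		umem->dma[i] = dma;
	}
	return 0;

unwind:
	/* Undo only the mappings created so far. */
	while (i--) {
		dma_unmap_page_attrs(dev, umem->dma[i], PAGE_SIZE,
				     DMA_BIDIRECTIONAL,
				     DMA_ATTR_SKIP_CPU_SYNC);
		umem->dma[i] = 0;
	}
	return -ENOMEM;
}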
dma                40 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 	DEFINE_DMA_UNMAP_ADDR(dma);
dma                46 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 	dma_addr_t dma;
dma                97 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 	dma_addr_t dma;			/* phys. address of descriptor ring */
dma               314 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma               335 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma               540 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				      rx_buffer->dma,
dma               562 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			dma_unmap_page_attrs(rx_ring->dev, rx_buffer->dma,
dma               610 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma_addr_t dma;
dma               624 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma = dma_map_page_attrs(rx_ring->dev, page, 0,
dma               631 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (dma_mapping_error(rx_ring->dev, dma)) {
dma               638 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	bi->dma = dma;
dma               672 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		dma_sync_single_range_for_device(rx_ring->dev, bi->dma,
dma               680 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);
dma               781 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	new_buff->dma = old_buff->dma;
dma               989 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma_addr_t dma;
dma               997 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma = dma_map_single(ring->dev, xdp->data, len, DMA_TO_DEVICE);
dma               998 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	if (dma_mapping_error(ring->dev, dma))
dma              1006 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma_unmap_addr_set(tx_buffer, dma, dma);
dma              1039 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              1680 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	u64 tdba = ring->dma;
dma              1902 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	u64 rdba = ring->dma;
dma              2347 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 					      rx_buffer->dma,
dma              2354 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				     rx_buffer->dma,
dma              2392 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				 dma_unmap_addr(tx_buffer, dma),
dma              2414 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 					       dma_unmap_addr(tx_buffer, dma),
dma              3354 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			  tx_ring->dma);
dma              3400 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 					   &tx_ring->dma, GFP_KERNEL);
dma              3478 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 					   &rx_ring->dma, GFP_KERNEL);
dma              3544 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			  rx_ring->dma);
dma              3952 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma_addr_t dma;
dma              3965 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	dma = dma_map_single(tx_ring->dev, skb->data, size, DMA_TO_DEVICE);
dma              3970 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		if (dma_mapping_error(tx_ring->dev, dma))
dma              3975 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		dma_unmap_addr_set(tx_buffer, dma, dma);
dma              3977 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              3991 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			dma += IXGBE_MAX_DATA_PER_TXD;
dma              3994 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			tx_desc->read.buffer_addr = cpu_to_le64(dma);
dma              4013 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		dma = skb_frag_dma_map(tx_ring->dev, frag, 0, size,
dma              4058 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				       dma_unmap_addr(tx_buffer, dma),
dma              4070 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 				 dma_unmap_addr(tx_buffer, dma),
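Several entries above reuse an Rx buffer instead of unmapping it (i40e_txrx.c 1218, igb_main.c 7932, igc_main.c 1360, ixgbe_main.c 1939, ixgbevf_main.c 781): the dma/page/page_offset bookkeeping is simply copied into the next slot to be refilled, so the page mapping stays live. A small sketch with the hypothetical my_rx_buf from the earlier Rx example:

/* Reuse keeps the existing page mapping alive: no unmap/remap, just
 * carry the bookkeeping over to the slot that will be refilled next.
 */
static void my_reuse_rx_buf(struct my_rx_buf *new_bi,
			    const struct my_rx_buf *old_bi)
{
	new_bi->dma = old_bi->dma;
	new_bi->page = old_bi->page;
	new_bi->page_offset = old_bi->page_offset;
}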
dma               573 drivers/net/ethernet/jme.c 	txring->dma		= ALIGN(txring->dmaalloc, RING_DESC_ALIGN);
dma               595 drivers/net/ethernet/jme.c 	txring->dma = 0;
dma               632 drivers/net/ethernet/jme.c 		txring->dma		= 0;
dma               652 drivers/net/ethernet/jme.c 	jwrite32(jme, JME_TXDBA_LO, (__u64)jme->txring[0].dma & 0xFFFFFFFFUL);
dma               653 drivers/net/ethernet/jme.c 	jwrite32(jme, JME_TXDBA_HI, (__u64)(jme->txring[0].dma) >> 32);
dma               654 drivers/net/ethernet/jme.c 	jwrite32(jme, JME_TXNDA, (__u64)jme->txring[0].dma & 0xFFFFFFFFUL);
dma               794 drivers/net/ethernet/jme.c 		rxring->dma      = 0;
dma               819 drivers/net/ethernet/jme.c 	rxring->dma		= ALIGN(rxring->dmaalloc, RING_DESC_ALIGN);
dma               851 drivers/net/ethernet/jme.c 	rxring->dma = 0;
dma               870 drivers/net/ethernet/jme.c 	jwrite32(jme, JME_RXDBA_LO, (__u64)(jme->rxring[0].dma) & 0xFFFFFFFFUL);
dma               871 drivers/net/ethernet/jme.c 	jwrite32(jme, JME_RXDBA_HI, (__u64)(jme->rxring[0].dma) >> 32);
dma               872 drivers/net/ethernet/jme.c 	jwrite32(jme, JME_RXNDA, (__u64)(jme->rxring[0].dma) & 0xFFFFFFFFUL);
dma               367 drivers/net/ethernet/jme.h 	dma_addr_t dma;		/* phys address for ring dma */
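The jme entries program the ring's bus address into split 32-bit registers (low word and high word, jme.c 652–654 and 870–872). A generic sketch of that split; my_write_reg() and the register names are placeholders for the driver's jwrite32()-style accessor:

#include <linux/kernel.h>
#include <linux/types.h>

/* my_write_reg() would be an iowrite32()/writel()-style accessor; only
 * the 64-bit address split is the point here.
 */
static void my_program_ring_base(dma_addr_t ring_dma)
{
	u32 lo = lower_32_bits(ring_dma);
	u32 hi = upper_32_bits(ring_dma);

	/* my_write_reg(RING_BASE_LO, lo); */
	/* my_write_reg(RING_BASE_HI, hi); */
	(void)lo;
	(void)hi;
}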
dma                84 drivers/net/ethernet/lantiq_etop.c 	struct ltq_dma_channel dma;
dma               107 drivers/net/ethernet/lantiq_etop.c 	ch->skb[ch->dma.desc] = netdev_alloc_skb(ch->netdev, MAX_DMA_DATA_LEN);
dma               108 drivers/net/ethernet/lantiq_etop.c 	if (!ch->skb[ch->dma.desc])
dma               110 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc_base[ch->dma.desc].addr = dma_map_single(&priv->pdev->dev,
dma               111 drivers/net/ethernet/lantiq_etop.c 		ch->skb[ch->dma.desc]->data, MAX_DMA_DATA_LEN,
dma               113 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc_base[ch->dma.desc].addr =
dma               114 drivers/net/ethernet/lantiq_etop.c 		CPHYSADDR(ch->skb[ch->dma.desc]->data);
dma               115 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc_base[ch->dma.desc].ctl =
dma               118 drivers/net/ethernet/lantiq_etop.c 	skb_reserve(ch->skb[ch->dma.desc], NET_IP_ALIGN);
dma               126 drivers/net/ethernet/lantiq_etop.c 	struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               127 drivers/net/ethernet/lantiq_etop.c 	struct sk_buff *skb = ch->skb[ch->dma.desc];
dma               135 drivers/net/ethernet/lantiq_etop.c 		ltq_dma_close(&ch->dma);
dma               137 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc++;
dma               138 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc %= LTQ_DESC_NUM;
dma               154 drivers/net/ethernet/lantiq_etop.c 		struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               163 drivers/net/ethernet/lantiq_etop.c 		ltq_dma_ack_irq(&ch->dma);
dma               179 drivers/net/ethernet/lantiq_etop.c 	while ((ch->dma.desc_base[ch->tx_free].ctl &
dma               183 drivers/net/ethernet/lantiq_etop.c 		memset(&ch->dma.desc_base[ch->tx_free], 0,
dma               193 drivers/net/ethernet/lantiq_etop.c 	ltq_dma_ack_irq(&ch->dma);
dma               212 drivers/net/ethernet/lantiq_etop.c 	ltq_dma_free(&ch->dma);
dma               213 drivers/net/ethernet/lantiq_etop.c 	if (ch->dma.irq)
dma               214 drivers/net/ethernet/lantiq_etop.c 		free_irq(ch->dma.irq, priv);
dma               218 drivers/net/ethernet/lantiq_etop.c 			dev_kfree_skb_any(ch->skb[ch->dma.desc]);
dma               268 drivers/net/ethernet/lantiq_etop.c 		ch->idx = ch->dma.nr = i;
dma               269 drivers/net/ethernet/lantiq_etop.c 		ch->dma.dev = &priv->pdev->dev;
dma               272 drivers/net/ethernet/lantiq_etop.c 			ltq_dma_alloc_tx(&ch->dma);
dma               275 drivers/net/ethernet/lantiq_etop.c 			ltq_dma_alloc_rx(&ch->dma);
dma               276 drivers/net/ethernet/lantiq_etop.c 			for (ch->dma.desc = 0; ch->dma.desc < LTQ_DESC_NUM;
dma               277 drivers/net/ethernet/lantiq_etop.c 					ch->dma.desc++)
dma               280 drivers/net/ethernet/lantiq_etop.c 			ch->dma.desc = 0;
dma               283 drivers/net/ethernet/lantiq_etop.c 		ch->dma.irq = irq;
dma               426 drivers/net/ethernet/lantiq_etop.c 		ltq_dma_open(&ch->dma);
dma               427 drivers/net/ethernet/lantiq_etop.c 		ltq_dma_enable_irq(&ch->dma);
dma               449 drivers/net/ethernet/lantiq_etop.c 		ltq_dma_close(&ch->dma);
dma               461 drivers/net/ethernet/lantiq_etop.c 	struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               468 drivers/net/ethernet/lantiq_etop.c 	if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) {
dma               477 drivers/net/ethernet/lantiq_etop.c 	ch->skb[ch->dma.desc] = skb;
dma               487 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc++;
dma               488 drivers/net/ethernet/lantiq_etop.c 	ch->dma.desc %= LTQ_DESC_NUM;
dma               491 drivers/net/ethernet/lantiq_etop.c 	if (ch->dma.desc_base[ch->dma.desc].ctl & LTQ_DMA_OWN)
dma                59 drivers/net/ethernet/lantiq_xrx200.c 	struct ltq_dma_channel dma;
dma               103 drivers/net/ethernet/lantiq_xrx200.c 		struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               110 drivers/net/ethernet/lantiq_xrx200.c 		ch->dma.desc++;
dma               111 drivers/net/ethernet/lantiq_xrx200.c 		ch->dma.desc %= LTQ_DESC_NUM;
dma               120 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_open(&priv->chan_tx.dma);
dma               121 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_enable_irq(&priv->chan_tx.dma);
dma               124 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_open(&priv->chan_rx.dma);
dma               133 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_enable_irq(&priv->chan_rx.dma);
dma               147 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_close(&priv->chan_rx.dma);
dma               150 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_close(&priv->chan_tx.dma);
dma               159 drivers/net/ethernet/lantiq_xrx200.c 	ch->skb[ch->dma.desc] = netdev_alloc_skb_ip_align(ch->priv->net_dev,
dma               161 drivers/net/ethernet/lantiq_xrx200.c 	if (!ch->skb[ch->dma.desc]) {
dma               166 drivers/net/ethernet/lantiq_xrx200.c 	ch->dma.desc_base[ch->dma.desc].addr = dma_map_single(ch->priv->dev,
dma               167 drivers/net/ethernet/lantiq_xrx200.c 			ch->skb[ch->dma.desc]->data, XRX200_DMA_DATA_LEN,
dma               170 drivers/net/ethernet/lantiq_xrx200.c 				       ch->dma.desc_base[ch->dma.desc].addr))) {
dma               171 drivers/net/ethernet/lantiq_xrx200.c 		dev_kfree_skb_any(ch->skb[ch->dma.desc]);
dma               177 drivers/net/ethernet/lantiq_xrx200.c 	ch->dma.desc_base[ch->dma.desc].ctl =
dma               187 drivers/net/ethernet/lantiq_xrx200.c 	struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               188 drivers/net/ethernet/lantiq_xrx200.c 	struct sk_buff *skb = ch->skb[ch->dma.desc];
dma               195 drivers/net/ethernet/lantiq_xrx200.c 	ch->dma.desc++;
dma               196 drivers/net/ethernet/lantiq_xrx200.c 	ch->dma.desc %= LTQ_DESC_NUM;
dma               220 drivers/net/ethernet/lantiq_xrx200.c 		struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               234 drivers/net/ethernet/lantiq_xrx200.c 		ltq_dma_enable_irq(&ch->dma);
dma               249 drivers/net/ethernet/lantiq_xrx200.c 		struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->tx_free];
dma               258 drivers/net/ethernet/lantiq_xrx200.c 			memset(&ch->dma.desc_base[ch->tx_free], 0,
dma               273 drivers/net/ethernet/lantiq_xrx200.c 		ltq_dma_enable_irq(&ch->dma);
dma               283 drivers/net/ethernet/lantiq_xrx200.c 	struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc];
dma               296 drivers/net/ethernet/lantiq_xrx200.c 	if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) {
dma               302 drivers/net/ethernet/lantiq_xrx200.c 	ch->skb[ch->dma.desc] = skb;
dma               316 drivers/net/ethernet/lantiq_xrx200.c 	ch->dma.desc++;
dma               317 drivers/net/ethernet/lantiq_xrx200.c 	ch->dma.desc %= LTQ_DESC_NUM;
dma               318 drivers/net/ethernet/lantiq_xrx200.c 	if (ch->dma.desc == ch->tx_free)
dma               344 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_disable_irq(&ch->dma);
dma               345 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_ack_irq(&ch->dma);
dma               361 drivers/net/ethernet/lantiq_xrx200.c 	ch_rx->dma.nr = XRX200_DMA_RX;
dma               362 drivers/net/ethernet/lantiq_xrx200.c 	ch_rx->dma.dev = priv->dev;
dma               365 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_alloc_rx(&ch_rx->dma);
dma               366 drivers/net/ethernet/lantiq_xrx200.c 	for (ch_rx->dma.desc = 0; ch_rx->dma.desc < LTQ_DESC_NUM;
dma               367 drivers/net/ethernet/lantiq_xrx200.c 	     ch_rx->dma.desc++) {
dma               372 drivers/net/ethernet/lantiq_xrx200.c 	ch_rx->dma.desc = 0;
dma               373 drivers/net/ethernet/lantiq_xrx200.c 	ret = devm_request_irq(priv->dev, ch_rx->dma.irq, xrx200_dma_irq, 0,
dma               377 drivers/net/ethernet/lantiq_xrx200.c 			ch_rx->dma.irq);
dma               381 drivers/net/ethernet/lantiq_xrx200.c 	ch_tx->dma.nr = XRX200_DMA_TX;
dma               382 drivers/net/ethernet/lantiq_xrx200.c 	ch_tx->dma.dev = priv->dev;
dma               385 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_alloc_tx(&ch_tx->dma);
dma               386 drivers/net/ethernet/lantiq_xrx200.c 	ret = devm_request_irq(priv->dev, ch_tx->dma.irq, xrx200_dma_irq, 0,
dma               390 drivers/net/ethernet/lantiq_xrx200.c 			ch_tx->dma.irq);
dma               397 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_free(&ch_tx->dma);
dma               407 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_free(&ch_rx->dma);
dma               415 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_free(&priv->chan_tx.dma);
dma               416 drivers/net/ethernet/lantiq_xrx200.c 	ltq_dma_free(&priv->chan_rx.dma);
dma               460 drivers/net/ethernet/lantiq_xrx200.c 	priv->chan_rx.dma.irq = platform_get_irq_byname(pdev, "rx");
dma               461 drivers/net/ethernet/lantiq_xrx200.c 	if (priv->chan_rx.dma.irq < 0)
dma               463 drivers/net/ethernet/lantiq_xrx200.c 	priv->chan_tx.dma.irq = platform_get_irq_byname(pdev, "tx");
dma               464 drivers/net/ethernet/lantiq_xrx200.c 	if (priv->chan_tx.dma.irq < 0)
dma              1068 drivers/net/ethernet/marvell/mvpp2/mvpp2.h 	dma_addr_t dma;
dma               288 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	tx_buf->dma = mvpp2_txdesc_dma_addr_get(port, tx_desc) +
dma              2240 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 		if (!IS_TSO_HEADER(txq_pcpu, tx_buf->dma))
dma              2241 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 			dma_unmap_single(port->dev->dev.parent, tx_buf->dma,
dma              2514 drivers/net/ethernet/marvell/skge.c 	u64 base = skge->dma + (e->desc - skge->mem);
dma              2549 drivers/net/ethernet/marvell/skge.c 	skge->mem = pci_alloc_consistent(hw->pdev, skge->mem_size, &skge->dma);
dma              2553 drivers/net/ethernet/marvell/skge.c 	BUG_ON(skge->dma & 7);
dma              2555 drivers/net/ethernet/marvell/skge.c 	if (upper_32_bits(skge->dma) != upper_32_bits(skge->dma + skge->mem_size)) {
dma              2561 drivers/net/ethernet/marvell/skge.c 	err = skge_ring_alloc(&skge->rx_ring, skge->mem, skge->dma);
dma              2570 drivers/net/ethernet/marvell/skge.c 			      skge->dma + rx_size);
dma              2627 drivers/net/ethernet/marvell/skge.c 	pci_free_consistent(hw->pdev, skge->mem_size, skge->mem, skge->dma);
dma              2717 drivers/net/ethernet/marvell/skge.c 	pci_free_consistent(hw->pdev, skge->mem_size, skge->mem, skge->dma);
dma              2473 drivers/net/ethernet/marvell/skge.h 	dma_addr_t	     dma;
dma               823 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	void *ret = ring->dma;
dma               831 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	int idx = txd - ring->dma;
dma               837 drivers/net/ethernet/mediatek/mtk_eth_soc.c 				       struct mtk_tx_dma *dma)
dma               839 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	return ring->dma_pdma - ring->dma + dma;
dma               842 drivers/net/ethernet/mediatek/mtk_eth_soc.c static int txd_to_idx(struct mtk_tx_ring *ring, struct mtk_tx_dma *dma)
dma               844 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	return ((void *)dma - (void *)ring->dma) / sizeof(*dma);
dma              1197 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		if (ring->dma[idx].rxd2 & RX_DMA_DONE) {
dma              1246 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		rxd = &ring->dma[idx];
dma              1348 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	u32 cpu, dma;
dma              1351 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	dma = mtk_r32(eth, MTK_QTX_DRX_PTR);
dma              1355 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	while ((cpu != dma) && budget) {
dma              1396 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	u32 cpu, dma;
dma              1399 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	dma = mtk_r32(eth, MT7628_TX_DTX_IDX0);
dma              1401 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	while ((cpu != dma) && budget) {
dma              1415 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		desc = &ring->dma[cpu];
dma              1536 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	int i, sz = sizeof(*ring->dma);
dma              1543 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	ring->dma = dma_alloc_coherent(eth->dev, MTK_DMA_SIZE * sz,
dma              1545 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	if (!ring->dma)
dma              1552 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		ring->dma[i].txd2 = next_ptr;
dma              1553 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		ring->dma[i].txd3 = TX_DMA_LS0 | TX_DMA_OWNER_CPU;
dma              1575 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	ring->next_free = &ring->dma[0];
dma              1576 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	ring->last_free = &ring->dma[MTK_DMA_SIZE - 1];
dma              1620 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	if (ring->dma) {
dma              1622 drivers/net/ethernet/mediatek/mtk_eth_soc.c 				  MTK_DMA_SIZE * sizeof(*ring->dma),
dma              1623 drivers/net/ethernet/mediatek/mtk_eth_soc.c 				  ring->dma,
dma              1625 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		ring->dma = NULL;
dma              1674 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	ring->dma = dma_alloc_coherent(eth->dev,
dma              1675 drivers/net/ethernet/mediatek/mtk_eth_soc.c 				       rx_dma_size * sizeof(*ring->dma),
dma              1677 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	if (!ring->dma)
dma              1687 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		ring->dma[i].rxd1 = (unsigned int)dma_addr;
dma              1690 drivers/net/ethernet/mediatek/mtk_eth_soc.c 			ring->dma[i].rxd2 = RX_DMA_LSO;
dma              1692 drivers/net/ethernet/mediatek/mtk_eth_soc.c 			ring->dma[i].rxd2 = RX_DMA_PLEN0(ring->buf_size);
dma              1715 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	if (ring->data && ring->dma) {
dma              1719 drivers/net/ethernet/mediatek/mtk_eth_soc.c 			if (!ring->dma[i].rxd1)
dma              1722 drivers/net/ethernet/mediatek/mtk_eth_soc.c 					 ring->dma[i].rxd1,
dma              1731 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	if (ring->dma) {
dma              1733 drivers/net/ethernet/mediatek/mtk_eth_soc.c 				  ring->dma_size * sizeof(*ring->dma),
dma              1734 drivers/net/ethernet/mediatek/mtk_eth_soc.c 				  ring->dma,
dma              1736 drivers/net/ethernet/mediatek/mtk_eth_soc.c 		ring->dma = NULL;
dma               623 drivers/net/ethernet/mediatek/mtk_eth_soc.h 	struct mtk_tx_dma *dma;
dma               652 drivers/net/ethernet/mediatek/mtk_eth_soc.h 	struct mtk_rx_dma *dma;
dma               709 drivers/net/ethernet/mellanox/mlx4/alloc.c 	db->dma     = pgdir->db_dma  + db->index * 4;
dma               853 drivers/net/ethernet/mellanox/mlx4/cmd.c 	err = mlx4_cmd_box(dev, inbox->dma, outbox->dma, port, 3,
dma               949 drivers/net/ethernet/mellanox/mlx4/cmd.c 				err = mlx4_cmd_box(dev, inbox->dma, outbox->dma,
dma               972 drivers/net/ethernet/mellanox/mlx4/cmd.c 							   inbox->dma,
dma               973 drivers/net/ethernet/mellanox/mlx4/cmd.c 							   outbox->dma,
dma               993 drivers/net/ethernet/mellanox/mlx4/cmd.c 				err = mlx4_cmd_box(dev, inbox->dma, outbox->dma,
dma              1020 drivers/net/ethernet/mellanox/mlx4/cmd.c 	return mlx4_cmd_box(dev, inbox->dma, outbox->dma,
dma              1044 drivers/net/ethernet/mellanox/mlx4/cmd.c 	in_param = cmd->has_inbox ? (u64) inbox->dma : vhcr->in_param;
dma              1045 drivers/net/ethernet/mellanox/mlx4/cmd.c 	out_param = cmd->has_outbox ? (u64) outbox->dma : vhcr->out_param;
dma              1730 drivers/net/ethernet/mellanox/mlx4/cmd.c 		ret = mlx4_ACCESS_MEM(dev, inbox->dma, slave,
dma              1768 drivers/net/ethernet/mellanox/mlx4/cmd.c 		in_param = cmd->has_inbox ? (u64) inbox->dma :
dma              1770 drivers/net/ethernet/mellanox/mlx4/cmd.c 		out_param = cmd->has_outbox ? (u64) outbox->dma :
dma              1803 drivers/net/ethernet/mellanox/mlx4/cmd.c 		ret = mlx4_ACCESS_MEM(dev, outbox->dma, slave,
dma              2702 drivers/net/ethernet/mellanox/mlx4/cmd.c 				       &mailbox->dma);
dma              2718 drivers/net/ethernet/mellanox/mlx4/cmd.c 	dma_pool_free(mlx4_priv(dev)->cmd.pool, mailbox->buf, mailbox->dma);
dma              3310 drivers/net/ethernet/mellanox/mlx4/cmd.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma,
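
The mlx4 cmd.c lines above carve command mailboxes out of a DMA pool, so each mailbox carries a CPU buffer plus the bus address (mailbox->dma) later passed to mlx4_cmd()/mlx4_cmd_box(). A hedged sketch of that allocation step with a hypothetical my_mailbox type; the pool itself would come from dma_pool_create() at driver init.

#include <linux/dmapool.h>
#include <linux/slab.h>

struct my_mailbox {
        void            *buf;   /* CPU address of the command buffer */
        dma_addr_t      dma;    /* bus address handed to the firmware */
};

static struct my_mailbox *my_mailbox_alloc(struct dma_pool *pool)
{
        struct my_mailbox *mailbox;

        mailbox = kmalloc(sizeof(*mailbox), GFP_KERNEL);
        if (!mailbox)
                return NULL;

        mailbox->buf = dma_pool_zalloc(pool, GFP_KERNEL, &mailbox->dma);
        if (!mailbox->buf) {
                kfree(mailbox);
                return NULL;
        }
        return mailbox;
}

static void my_mailbox_free(struct dma_pool *pool, struct my_mailbox *mailbox)
{
        dma_pool_free(pool, mailbox->buf, mailbox->dma);
        kfree(mailbox);
}
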
dma               149 drivers/net/ethernet/mellanox/mlx4/cq.c 	return mlx4_cmd(dev, mailbox->dma, cq_num, opmod,
dma               157 drivers/net/ethernet/mellanox/mlx4/cq.c 	return mlx4_cmd(dev, mailbox->dma, cq_num, opmod, MLX4_CMD_MODIFY_CQ,
dma               164 drivers/net/ethernet/mellanox/mlx4/cq.c 	return mlx4_cmd_box(dev, 0, mailbox ? mailbox->dma : 0,
dma               142 drivers/net/ethernet/mellanox/mlx4/en_cq.c 			    &mdev->priv_uar, cq->wqres.db.dma, &cq->mcq,
dma               565 drivers/net/ethernet/mellanox/mlx4/en_dcb_nl.c 				   mailbox_out->dma,
dma               623 drivers/net/ethernet/mellanox/mlx4/en_dcb_nl.c 	mailbox_in_dma = mailbox_in->dma;
dma               692 drivers/net/ethernet/mellanox/mlx4/en_dcb_nl.c 				   mailbox_out->dma, inmod,
dma                66 drivers/net/ethernet/mellanox/mlx4/en_port.c 	err = mlx4_cmd(dev, mailbox->dma, priv->port, 0, MLX4_CMD_SET_VLAN_FLTR,
dma                83 drivers/net/ethernet/mellanox/mlx4/en_port.c 	err = mlx4_cmd_box(mdev->dev, 0, mailbox->dma, port, 0,
dma               208 drivers/net/ethernet/mellanox/mlx4/en_port.c 	err = mlx4_cmd_box(mdev->dev, 0, mailbox->dma, in_mod, 0,
dma               228 drivers/net/ethernet/mellanox/mlx4/en_port.c 		err = mlx4_cmd_box(mdev->dev, 0, mailbox_priority->dma,
dma                83 drivers/net/ethernet/mellanox/mlx4/en_resources.c 	context->db_rec_addr = cpu_to_be64(priv->res.db.dma << 2);
dma                58 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	dma_addr_t dma;
dma                63 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	dma = dma_map_page(priv->ddev, page, 0, PAGE_SIZE, priv->dma_dir);
dma                64 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	if (unlikely(dma_mapping_error(priv->ddev, dma))) {
dma                69 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	frag->dma = dma;
dma                88 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].addr = cpu_to_be64(frags->dma +
dma                98 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		dma_unmap_page(priv->ddev, frag->dma,
dma               146 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			frags->dma  = ring->page_cache.buf[ring->page_cache.index].dma;
dma               149 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[0].addr = cpu_to_be64(frags->dma +
dma               424 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	cache->buf[cache->index].dma = frame->dma;
dma               456 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		dma_unmap_page(priv->ddev, ring->page_cache.buf[i].dma,
dma               477 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	dma_addr_t dma;
dma               487 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		dma = frags->dma;
dma               488 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		dma_sync_single_range_for_cpu(priv->ddev, dma, frags->page_offset,
dma               511 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma_unmap_page(priv->ddev, dma, PAGE_SIZE, priv->dma_dir);
dma               731 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma_addr_t dma;
dma               735 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma = frags[0].dma + frags[0].page_offset;
dma               736 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma_sync_single_for_cpu(priv->ddev, dma, sizeof(*ethh),
dma               770 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma_addr_t dma;
dma               774 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma = frags[0].dma + frags[0].page_offset;
dma               775 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			dma_sync_single_for_cpu(priv->ddev, dma,
dma              1079 drivers/net/ethernet/mellanox/mlx4/en_rx.c 	context->db_rec_addr = cpu_to_be64(ring->wqres.db.dma);
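
The mlx4 en_rx.c excerpts map whole pages for receive buffers and hand page offsets to the hardware; before the CPU touches a completed fragment, only that byte range is synced back with dma_sync_single_range_for_cpu(). A sketch of the map/sync/unmap sequence, assuming plain DMA_FROM_DEVICE receive (the driver's frag/priv bookkeeping is reduced to one structure):

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/mm.h>

struct my_rx_frag {
        struct page     *page;
        dma_addr_t      dma;
        u32             page_offset;
};

static int my_rx_frag_map(struct device *dev, struct my_rx_frag *frag)
{
        frag->page = alloc_page(GFP_ATOMIC);
        if (!frag->page)
                return -ENOMEM;

        frag->dma = dma_map_page(dev, frag->page, 0, PAGE_SIZE,
                                 DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, frag->dma)) {
                __free_page(frag->page);
                return -ENOMEM;
        }
        frag->page_offset = 0;
        return 0;
}

/* Called once the NIC has written 'len' bytes into the fragment. */
static void my_rx_frag_complete(struct device *dev, struct my_rx_frag *frag,
                                unsigned int len)
{
        dma_sync_single_range_for_cpu(dev, frag->dma, frag->page_offset,
                                      len, DMA_FROM_DEVICE);
        /* ... build the skb from the page contents here ... */
}

static void my_rx_frag_unmap(struct device *dev, struct my_rx_frag *frag)
{
        dma_unmap_page(dev, frag->dma, PAGE_SIZE, DMA_FROM_DEVICE);
        __free_page(frag->page);
}
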
dma               350 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		.dma = tx_info->map0_dma,
dma               781 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	dma_addr_t dma = 0;
dma               789 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		dma = skb_frag_dma_map(ddev, frag,
dma               792 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		if (dma_mapping_error(ddev, dma))
dma               795 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		data->addr = cpu_to_be64(dma);
dma               806 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		dma = dma_map_single(ddev, skb->data +
dma               809 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		if (dma_mapping_error(ddev, dma))
dma               812 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		data->addr = cpu_to_be64(dma);
dma               818 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	tx_info->map0_dma = dma;
dma              1130 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	dma_addr_t dma;
dma              1152 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	dma = frame->dma;
dma              1156 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	tx_info->map0_dma = dma;
dma              1159 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	dma_sync_single_range_for_device(priv->ddev, dma, frame->page_offset,
dma              1162 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	data->addr = cpu_to_be64(dma + frame->page_offset);
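
On transmit, the mlx4 en_tx.c lines map the skb linear area with dma_map_single() and each paged fragment with skb_frag_dma_map(), remembering one mapping (map0_dma) so the completion path can unmap it later. A sketch of that walk over an skb; descriptor writing and the unwind of partially mapped fragments are left out, and the sketch assumes the skb has a linear part.

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

static int my_map_skb_for_tx(struct device *dev, struct sk_buff *skb,
                             dma_addr_t *map0_dma)
{
        int i;

        /* Fragments first, as mlx4 fills data segments from the end. */
        for (i = skb_shinfo(skb)->nr_frags - 1; i >= 0; i--) {
                const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];
                dma_addr_t dma;

                dma = skb_frag_dma_map(dev, frag, 0, skb_frag_size(frag),
                                       DMA_TO_DEVICE);
                if (dma_mapping_error(dev, dma))
                        return -ENOMEM;
                /* write cpu_to_be64(dma) into the i-th data segment here */
        }

        if (skb_headlen(skb)) {
                dma_addr_t dma = dma_map_single(dev, skb->data,
                                                skb_headlen(skb),
                                                DMA_TO_DEVICE);
                if (dma_mapping_error(dev, dma))
                        return -ENOMEM;
                *map0_dma = dma;        /* remembered for unmap on completion */
        }
        return 0;
}
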
dma               912 drivers/net/ethernet/mellanox/mlx4/eq.c 	return mlx4_cmd(dev, mailbox->dma, eq_num, 0,
dma               197 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd(dev, mailbox->dma, 0, 0, MLX4_CMD_MOD_STAT_CFG,
dma               228 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, in_modifier, 0,
dma               559 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, in_modifier, op_modifier,
dma               837 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0, MLX4_CMD_QUERY_DEV_CAP,
dma              1197 drivers/net/ethernet/mellanox/mlx4/fw.c 		err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0, MLX4_CMD_QUERY_DEV_CAP,
dma              1226 drivers/net/ethernet/mellanox/mlx4/fw.c 		err = mlx4_cmd_box(dev, 0, mailbox->dma, port, 0, MLX4_CMD_QUERY_PORT,
dma              1285 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, outbox->dma, 0, 0, MLX4_CMD_QUERY_DEV_CAP,
dma              1434 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, outbox->dma, vhcr->in_modifier, 0,
dma              1496 drivers/net/ethernet/mellanox/mlx4/fw.c 	err =  mlx4_cmd_box(dev, 0, mailbox->dma, port, 0,
dma              1563 drivers/net/ethernet/mellanox/mlx4/fw.c 				err = mlx4_cmd(dev, mailbox->dma, nent, 0, op,
dma              1574 drivers/net/ethernet/mellanox/mlx4/fw.c 		err = mlx4_cmd(dev, mailbox->dma, nent, 0, op,
dma              1650 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0, MLX4_CMD_QUERY_FW,
dma              1750 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, outbox->dma, 0, 0, MLX4_CMD_QUERY_FW,
dma              1815 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0, MLX4_CMD_QUERY_ADAPTER,
dma              2055 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd(dev, mailbox->dma, 0, 0, MLX4_CMD_INIT_HCA,
dma              2090 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0,
dma              2220 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0,
dma              2333 drivers/net/ethernet/mellanox/mlx4/fw.c 		err = mlx4_cmd(dev, mailbox->dma, port, 0, MLX4_CMD_INIT_PORT,
dma              2434 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd(dev, mailbox->dma, 0, 0, MLX4_CMD_CONFIG_DEV,
dma              2450 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 1, MLX4_CMD_CONFIG_DEV,
dma              2567 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd(dev, mailbox->dma, 0,
dma              2616 drivers/net/ethernet/mellanox/mlx4/fw.c 	ret = mlx4_cmd_box(dev, 0, mailbox->dma, port, op_modifier,
dma              2656 drivers/net/ethernet/mellanox/mlx4/fw.c 		err = mlx4_cmd_box(dev, 0, mailbox->dma, in_mod, 0x2,
dma              2731 drivers/net/ethernet/mellanox/mlx4/fw.c 		err = mlx4_cmd_box(dev, 0, mailbox->dma, 0, 0,
dma              2851 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, 0x01 /* subn mgmt class */,
dma              2864 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd(dev, mailbox->dma, 0x01 /* subn mgmt class */,
dma              2944 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd_box(dev, inbox->dma, outbox->dma, 0, 0,
dma              3013 drivers/net/ethernet/mellanox/mlx4/fw.c 	return mlx4_cmd_box(dev, inbox->dma, outbox->dma, vhcr->in_modifier,
dma              3038 drivers/net/ethernet/mellanox/mlx4/fw.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma               103 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, 1, MLX4_CMD_SET_PORT,
dma               151 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, 1, MLX4_CMD_SET_PORT,
dma               173 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, port,
dma               210 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	err = mlx4_cmd(dev, mailbox->dma, port,
dma               235 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, (vport << 8) | port,
dma               280 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	err = mlx4_cmd(dev, mailbox->dma, (vport << 8) | port,
dma              2045 drivers/net/ethernet/mellanox/mlx4/main.c 	u64 dma = (u64) priv->mfunc.vhcr_dma;
dma              2089 drivers/net/ethernet/mellanox/mlx4/main.c 	if (mlx4_comm_cmd(dev, MLX4_COMM_CMD_VHCR0, dma >> 48,
dma              2092 drivers/net/ethernet/mellanox/mlx4/main.c 	if (mlx4_comm_cmd(dev, MLX4_COMM_CMD_VHCR1, dma >> 32,
dma              2095 drivers/net/ethernet/mellanox/mlx4/main.c 	if (mlx4_comm_cmd(dev, MLX4_COMM_CMD_VHCR2, dma >> 16,
dma              2098 drivers/net/ethernet/mellanox/mlx4/main.c 	if (mlx4_comm_cmd(dev, MLX4_COMM_CMD_VHCR_EN, dma,
dma              2623 drivers/net/ethernet/mellanox/mlx4/main.c 	err = mlx4_cmd_box(dev, 0, if_stat_mailbox->dma, if_stat_in_mod, 0,
dma                61 drivers/net/ethernet/mellanox/mlx4/mcg.c 	err = mlx4_cmd_imm(dev, mailbox->dma, &imm, size, 0,
dma                85 drivers/net/ethernet/mellanox/mlx4/mcg.c 	return mlx4_cmd_box(dev, 0, mailbox->dma, index, 0, MLX4_CMD_READ_MCG,
dma                92 drivers/net/ethernet/mellanox/mlx4/mcg.c 	return mlx4_cmd(dev, mailbox->dma, index, 0, MLX4_CMD_WRITE_MCG,
dma               102 drivers/net/ethernet/mellanox/mlx4/mcg.c 	return mlx4_cmd(dev, mailbox->dma, in_mod, 0x1,
dma               113 drivers/net/ethernet/mellanox/mlx4/mcg.c 	err = mlx4_cmd_imm(dev, mailbox->dma, &imm, 0, op_mod,
dma              1356 drivers/net/ethernet/mellanox/mlx4/mcg.c 	err = mlx4_cmd(dev, mailbox->dma, qpn, attach,
dma               260 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h 	dma_addr_t	dma;
dma               270 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h 		dma_addr_t	dma;
dma               279 drivers/net/ethernet/mellanox/mlx4/mr.c 	return mlx4_cmd(dev, mailbox->dma, mpt_index,
dma               287 drivers/net/ethernet/mellanox/mlx4/mr.c 	return mlx4_cmd_box(dev, 0, mailbox ? mailbox->dma : 0, mpt_index,
dma               321 drivers/net/ethernet/mellanox/mlx4/mr.c 		err = mlx4_cmd_box(dev, 0, mailbox->dma, key,
dma               435 drivers/net/ethernet/mellanox/mlx4/mr.c 	return mlx4_cmd(dev, mailbox->dma, num_entries, 0, MLX4_CMD_WRITE_MTT,
dma               142 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma               549 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma              1081 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd_box(dev, inmailbox->dma, outmailbox->dma, port, 3,
dma              1195 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma,
dma              1469 drivers/net/ethernet/mellanox/mlx4/port.c 			err = mlx4_cmd(dev, inbox->dma, in_mod & 0xffff, op_mod,
dma              1476 drivers/net/ethernet/mellanox/mlx4/port.c 		return mlx4_cmd(dev, inbox->dma, in_mod & 0xffff, op_mod,
dma              1532 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, inbox->dma, port, is_eth, MLX4_CMD_SET_PORT,
dma              1595 drivers/net/ethernet/mellanox/mlx4/port.c 		err = mlx4_cmd(dev, mailbox->dma, port,
dma              1633 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma              1671 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma              1695 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma              1719 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma              1746 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, 1, MLX4_CMD_SET_PORT,
dma              1788 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, in_mod, MLX4_SET_PORT_ETH_OPCODE,
dma              1808 drivers/net/ethernet/mellanox/mlx4/port.c 	err = mlx4_cmd(dev, mailbox->dma, port, MLX4_SET_PORT_BEACON_OPCODE,
dma              2087 drivers/net/ethernet/mellanox/mlx4/port.c 	ret = mlx4_cmd_box(dev, inbox->dma, outbox->dma, port, 3,
dma               182 drivers/net/ethernet/mellanox/mlx4/qp.c 	ret = mlx4_cmd(dev, mailbox->dma,
dma               504 drivers/net/ethernet/mellanox/mlx4/qp.c 	err = mlx4_cmd(dev, mailbox->dma, qpn & 0xffffff, 0,
dma               903 drivers/net/ethernet/mellanox/mlx4/qp.c 	err = mlx4_cmd_box(dev, 0, mailbox->dma, qp->qpn, 0,
dma              3389 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 	err = mlx4_cmd(dev, mailbox->dma, in_modifier, 0,
dma              4335 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 	err = mlx4_cmd(dev, inbox->dma,
dma              4431 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 	err = mlx4_cmd_imm(dev, inbox->dma, &vhcr->out_param,
dma              4992 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 	err = mlx4_cmd_imm(dev, mailbox->dma, &reg_id, fs_rule->mirr_mbox_size >> 2, 0,
dma              5392 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 			err = mlx4_cmd(dev, mailbox->dma,
dma                67 drivers/net/ethernet/mellanox/mlx4/srq.c 	return mlx4_cmd(dev, mailbox->dma, srq_num, 0,
dma                75 drivers/net/ethernet/mellanox/mlx4/srq.c 	return mlx4_cmd_box(dev, 0, mailbox ? mailbox->dma : 0, srq_num,
dma                89 drivers/net/ethernet/mellanox/mlx4/srq.c 	return mlx4_cmd_box(dev, 0, mailbox->dma, srq_num, 0, MLX4_CMD_QUERY_SRQ,
dma               226 drivers/net/ethernet/mellanox/mlx5/core/alloc.c 	db->dma     = pgdir->db_dma  + offset;
dma               898 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 		lay->in_ptr = cpu_to_be64(ent->in->next->dma);
dma               901 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 		lay->out_ptr = cpu_to_be64(ent->out->next->dma);
dma              1193 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 				       &mailbox->dma);
dma              1207 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	dma_pool_free(dev->cmd.pool, mailbox->buf, mailbox->dma);
dma              1239 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 		block->next = cpu_to_be64(tmp->next ? tmp->next->dma : 0);
dma              1884 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 		cmd->dma = cmd->alloc_dma;
dma              1898 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	cmd->dma = ALIGN(cmd->alloc_dma, MLX5_ADAPTER_PAGE_SIZE);
dma              1974 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	cmd_h = (u32)((u64)(cmd->dma) >> 32);
dma              1975 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	cmd_l = (u32)(cmd->dma);
dma              1988 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	mlx5_core_dbg(dev, "descriptor at dma 0x%llx\n", (unsigned long long)(cmd->dma));
dma               128 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 	dma_addr_t dma;
dma               145 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 	dma = dma_map_single(ddev, buff, tracer->buff.size, DMA_FROM_DEVICE);
dma               146 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 	if (dma_mapping_error(ddev, dma)) {
dma               148 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 			       dma_mapping_error(ddev, dma));
dma               152 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 	tracer->buff.dma = dma;
dma               170 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 	dma_unmap_single(ddev, tracer->buff.dma, tracer->buff.size, DMA_FROM_DEVICE);
dma               193 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 		mtt[i] = cpu_to_be64(tracer->buff.dma + i * PAGE_SIZE);
dma               205 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.c 	MLX5_SET64(mkc, mkc, start_addr, tracer->buff.dma);
dma                90 drivers/net/ethernet/mellanox/mlx5/core/diag/fw_tracer.h 		dma_addr_t dma;
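
The mlx5 fw_tracer lines stream the firmware trace log into one large driver buffer: it is mapped once with dma_map_single(..., DMA_FROM_DEVICE), its bus address is programmed into the device, and it is unmapped on cleanup. A reduced sketch of just the mapping step, with the tracer's MKEY/MTT setup omitted and a simple kzalloc() standing in for the driver's own allocation:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

struct my_trace_buf {
        void            *log;
        dma_addr_t      dma;
        u32             size;
};

static int my_trace_buf_map(struct device *dev, struct my_trace_buf *buf,
                            u32 size)
{
        buf->size = size;
        buf->log = kzalloc(size, GFP_KERNEL);
        if (!buf->log)
                return -ENOMEM;

        buf->dma = dma_map_single(dev, buf->log, size, DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, buf->dma)) {
                kfree(buf->log);
                return -ENOMEM;
        }
        /* buf->dma is the address the device writes trace data to */
        return 0;
}

static void my_trace_buf_unmap(struct device *dev, struct my_trace_buf *buf)
{
        dma_unmap_single(dev, buf->dma, buf->size, DMA_FROM_DEVICE);
        kfree(buf->log);
}
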
dma               160 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 	struct mlx5e_sq_dma *dma = mlx5e_dma_get(sq, sq->dma_fifo_pc++);
dma               162 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 	dma->addr = addr;
dma               163 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 	dma->size = size;
dma               164 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 	dma->type = map_type;
dma               168 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h mlx5e_tx_dma_unmap(struct device *pdev, struct mlx5e_sq_dma *dma)
dma               170 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 	switch (dma->type) {
dma               172 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 		dma_unmap_single(pdev, dma->addr, dma->size, DMA_TO_DEVICE);
dma               175 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h 		dma_unmap_page(pdev, dma->addr, dma->size, DMA_TO_DEVICE);
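
mlx5's TX path records every mapping it makes in a small FIFO together with its type, so the completion path can replay the FIFO and call the matching unmap primitive; the switch in mlx5e_tx_dma_unmap() above is the core of it. A generic sketch of that record/unmap pairing, using invented enum and helper names rather than the mlx5 ones:

#include <linux/dma-mapping.h>
#include <linux/types.h>

enum my_dma_map_type {
        MY_DMA_MAP_SINGLE,
        MY_DMA_MAP_PAGE,
};

struct my_sq_dma {
        dma_addr_t              addr;
        u32                     size;
        enum my_dma_map_type    type;
};

static void my_tx_dma_push(struct my_sq_dma *fifo, u32 *pc, u32 mask,
                           dma_addr_t addr, u32 size,
                           enum my_dma_map_type type)
{
        struct my_sq_dma *dma = &fifo[(*pc)++ & mask];

        dma->addr = addr;
        dma->size = size;
        dma->type = type;
}

static void my_tx_dma_unmap(struct device *dev, struct my_sq_dma *dma)
{
        switch (dma->type) {
        case MY_DMA_MAP_SINGLE:
                dma_unmap_single(dev, dma->addr, dma->size, DMA_TO_DEVICE);
                break;
        case MY_DMA_MAP_PAGE:
                dma_unmap_page(dev, dma->addr, dma->size, DMA_TO_DEVICE);
                break;
        }
}
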
dma                16 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		dma_addr_t dma = dma_map_page(dev, umem->pgs[i], 0, PAGE_SIZE,
dma                19 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		if (unlikely(dma_mapping_error(dev, dma)))
dma                21 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		umem->pages[i].dma = dma;
dma                28 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		dma_unmap_page(dev, umem->pages[i].dma, PAGE_SIZE,
dma                30 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		umem->pages[i].dma = 0;
dma                43 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		dma_unmap_page(dev, umem->pages[i].dma, PAGE_SIZE,
dma                45 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c 		umem->pages[i].dma = 0;
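
The xsk/umem.c excerpt maps every page of an AF_XDP umem up front and unwinds the already-mapped pages if one mapping fails. The loop-with-unwind shape is generic; a sketch over a plain page array (names are illustrative):

#include <linux/dma-mapping.h>
#include <linux/mm.h>

static int my_map_pages(struct device *dev, struct page **pages,
                        dma_addr_t *dma, u32 npgs)
{
        u32 i;

        for (i = 0; i < npgs; i++) {
                dma[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
                                      DMA_BIDIRECTIONAL);
                if (dma_mapping_error(dev, dma[i]))
                        goto err_unmap;
        }
        return 0;

err_unmap:
        while (i--) {
                dma_unmap_page(dev, dma[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
                dma[i] = 0;
        }
        return -ENOMEM;
}

static void my_unmap_pages(struct device *dev, dma_addr_t *dma, u32 npgs)
{
        u32 i;

        for (i = 0; i < npgs; i++) {
                dma_unmap_page(dev, dma[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
                dma[i] = 0;
        }
}
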
dma               315 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	struct mlx5e_sq_dma *dma;
dma               320 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	dma = mlx5e_dma_get(sq, (*dma_fifo_cc)++);
dma               323 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	mlx5e_tx_dma_unmap(sq->pdev, dma);
dma               700 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	MLX5_SET64(wq, wq,  dbr_addr,		rq->wq_ctrl.db.dma);
dma              1238 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	MLX5_SET64(wq, wq, dbr_addr,      csp->wq_ctrl->db.dma);
dma              1632 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	MLX5_SET64(cqc, cqc, dbr_addr,      cq->wq_ctrl.db.dma);
dma               501 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c 				struct mlx5e_sq_dma *dma =
dma               504 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c 				mlx5e_tx_dma_unmap(sq->pdev, dma);
dma               560 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c 			struct mlx5e_sq_dma *dma =
dma               563 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c 			mlx5e_tx_dma_unmap(sq->pdev, dma);
dma               478 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	MLX5_SET64(cqc, cqc, dbr_addr, conn->cq.wq_ctrl.db.dma);
dma               596 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	MLX5_SET64(qpc, qpc, dbr_addr, conn->qp.wq_ctrl.db.dma);
dma               701 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	MLX5_SET64(qpc, qpc, dbr_addr, conn->qp.wq_ctrl.db.dma);
dma               176 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	MLX5_SET64(qpc, qpc, dbr_addr, dr_qp->wq_ctrl.db.dma);
dma               753 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	MLX5_SET64(cqc, cqc, dbr_addr, cq->wq_ctrl.db.dma);
dma               966 drivers/net/ethernet/micrel/ksz884x.c 	dma_addr_t dma;
dma              4434 drivers/net/ethernet/micrel/ksz884x.c 	pci_unmap_single(adapter->pdev, dma_buf->dma, dma_buf->len, direction);
dma              4437 drivers/net/ethernet/micrel/ksz884x.c 	dma_buf->dma = 0;
dma              4463 drivers/net/ethernet/micrel/ksz884x.c 		if (dma_buf->skb && !dma_buf->dma)
dma              4464 drivers/net/ethernet/micrel/ksz884x.c 			dma_buf->dma = pci_map_single(
dma              4471 drivers/net/ethernet/micrel/ksz884x.c 		set_rx_buf(desc, dma_buf->dma);
dma              4660 drivers/net/ethernet/micrel/ksz884x.c 		dma_buf->dma = pci_map_single(
dma              4663 drivers/net/ethernet/micrel/ksz884x.c 		set_tx_buf(desc, dma_buf->dma);
dma              4679 drivers/net/ethernet/micrel/ksz884x.c 			dma_buf->dma = pci_map_single(
dma              4684 drivers/net/ethernet/micrel/ksz884x.c 			set_tx_buf(desc, dma_buf->dma);
dma              4703 drivers/net/ethernet/micrel/ksz884x.c 		dma_buf->dma = pci_map_single(
dma              4706 drivers/net/ethernet/micrel/ksz884x.c 		set_tx_buf(desc, dma_buf->dma);
dma              4760 drivers/net/ethernet/micrel/ksz884x.c 			hw_priv->pdev, dma_buf->dma, dma_buf->len,
dma              4995 drivers/net/ethernet/micrel/ksz884x.c 		hw_priv->pdev, dma_buf->dma, packet_len + 4,
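
ksz884x (like several of the older drivers below) still uses the legacy pci_map_single()/pci_unmap_single() wrappers, which take the struct pci_dev and PCI_DMA_* direction constants instead of a struct device and DMA_* values. A sketch of a buffer_info-style RX mapping with that older API; my_dma_buf is an illustrative stand-in, and current kernels would use dma_map_single() directly.

#include <linux/pci.h>
#include <linux/skbuff.h>

struct my_dma_buf {
        struct sk_buff  *skb;
        dma_addr_t      dma;
        unsigned int    len;
};

static int my_rx_buf_map(struct pci_dev *pdev, struct my_dma_buf *dma_buf,
                         struct sk_buff *skb, unsigned int len)
{
        dma_buf->skb = skb;
        dma_buf->len = len;
        dma_buf->dma = pci_map_single(pdev, skb->data, len,
                                      PCI_DMA_FROMDEVICE);
        if (pci_dma_mapping_error(pdev, dma_buf->dma)) {
                dma_buf->dma = 0;
                return -ENOMEM;
        }
        return 0;
}

static void my_rx_buf_unmap(struct pci_dev *pdev, struct my_dma_buf *dma_buf)
{
        pci_unmap_single(pdev, dma_buf->dma, dma_buf->len,
                         PCI_DMA_FROMDEVICE);
        dma_buf->dma = 0;
}
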
dma               226 drivers/net/ethernet/netronome/nfp/nfp_net.h 	dma_addr_t dma;
dma               348 drivers/net/ethernet/netronome/nfp/nfp_net.h 	dma_addr_t dma;
dma              2356 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 				  tx_ring->txds, tx_ring->dma);
dma              2361 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 	tx_ring->dma = 0;
dma              2381 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 					   &tx_ring->dma,
dma              2508 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 				  rx_ring->rxds, rx_ring->dma);
dma              2513 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 	rx_ring->dma = 0;
dma              2539 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 					   &rx_ring->dma,
dma              2787 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 	nn_writeq(nn, NFP_NET_CFG_RXR_ADDR(idx), rx_ring->dma);
dma              2796 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 	nn_writeq(nn, NFP_NET_CFG_TXR_ADDR(idx), tx_ring->dma);
dma                37 drivers/net/ethernet/netronome/nfp/nfp_net_debugfs.c 		   rx_ring->cnt, &rx_ring->dma, rx_ring->rxds,
dma               102 drivers/net/ethernet/netronome/nfp/nfp_net_debugfs.c 		   tx_ring->cnt, &tx_ring->dma, tx_ring->txds,
dma               153 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	bool dma;
dma               383 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 			     FIELD_PREP(NSP_COMMAND_DMA_BUF, arg->dma) |
dma               583 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	arg->arg.dma = true;
dma               709 drivers/net/ethernet/nvidia/forcedeth.c 	dma_addr_t dma;
dma              1840 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_rx_ctx->dma = dma_map_single(&np->pci_dev->dev,
dma              1845 drivers/net/ethernet/nvidia/forcedeth.c 						       np->put_rx_ctx->dma))) {
dma              1850 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_rx.orig->buf = cpu_to_le32(np->put_rx_ctx->dma);
dma              1881 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_rx_ctx->dma = dma_map_single(&np->pci_dev->dev,
dma              1886 drivers/net/ethernet/nvidia/forcedeth.c 						       np->put_rx_ctx->dma))) {
dma              1891 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_rx.ex->bufhigh = cpu_to_le32(dma_high(np->put_rx_ctx->dma));
dma              1892 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_rx.ex->buflow = cpu_to_le32(dma_low(np->put_rx_ctx->dma));
dma              1946 drivers/net/ethernet/nvidia/forcedeth.c 		np->rx_skb[i].dma = 0;
dma              1982 drivers/net/ethernet/nvidia/forcedeth.c 		np->tx_skb[i].dma = 0;
dma              2005 drivers/net/ethernet/nvidia/forcedeth.c 	if (tx_skb->dma) {
dma              2007 drivers/net/ethernet/nvidia/forcedeth.c 			dma_unmap_single(&np->pci_dev->dev, tx_skb->dma,
dma              2011 drivers/net/ethernet/nvidia/forcedeth.c 			dma_unmap_page(&np->pci_dev->dev, tx_skb->dma,
dma              2014 drivers/net/ethernet/nvidia/forcedeth.c 		tx_skb->dma = 0;
dma              2049 drivers/net/ethernet/nvidia/forcedeth.c 		np->tx_skb[i].dma = 0;
dma              2077 drivers/net/ethernet/nvidia/forcedeth.c 			dma_unmap_single(&np->pci_dev->dev, np->rx_skb[i].dma,
dma              2252 drivers/net/ethernet/nvidia/forcedeth.c 		np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev,
dma              2256 drivers/net/ethernet/nvidia/forcedeth.c 					       np->put_tx_ctx->dma))) {
dma              2266 drivers/net/ethernet/nvidia/forcedeth.c 		put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma);
dma              2289 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_tx_ctx->dma = skb_frag_dma_map(
dma              2295 drivers/net/ethernet/nvidia/forcedeth.c 						       np->put_tx_ctx->dma))) {
dma              2313 drivers/net/ethernet/nvidia/forcedeth.c 			put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma);
dma              2409 drivers/net/ethernet/nvidia/forcedeth.c 		np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev,
dma              2413 drivers/net/ethernet/nvidia/forcedeth.c 					       np->put_tx_ctx->dma))) {
dma              2423 drivers/net/ethernet/nvidia/forcedeth.c 		put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma));
dma              2424 drivers/net/ethernet/nvidia/forcedeth.c 		put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma));
dma              2446 drivers/net/ethernet/nvidia/forcedeth.c 			np->put_tx_ctx->dma = skb_frag_dma_map(
dma              2453 drivers/net/ethernet/nvidia/forcedeth.c 						       np->put_tx_ctx->dma))) {
dma              2470 drivers/net/ethernet/nvidia/forcedeth.c 			put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma));
dma              2471 drivers/net/ethernet/nvidia/forcedeth.c 			put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma));
dma              2874 drivers/net/ethernet/nvidia/forcedeth.c 		dma_unmap_single(&np->pci_dev->dev, np->get_rx_ctx->dma,
dma              2976 drivers/net/ethernet/nvidia/forcedeth.c 		dma_unmap_single(&np->pci_dev->dev, np->get_rx_ctx->dma,
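
forcedeth's extended descriptor format stores a 64-bit bus address as two 32-bit little-endian words via its own dma_high()/dma_low() helpers. With the generic kernel helpers the same split looks like this, for an assumed two-word descriptor layout (field names are illustrative):

#include <linux/kernel.h>       /* upper_32_bits() / lower_32_bits() */
#include <linux/types.h>
#include <asm/byteorder.h>

struct my_ring_desc_ex {
        __le32 bufhigh;
        __le32 buflow;
        __le32 txvlan;
        __le32 flaglen;
};

static void my_fill_desc_addr(struct my_ring_desc_ex *desc, dma_addr_t dma)
{
        desc->bufhigh = cpu_to_le32(upper_32_bits(dma));
        desc->buflow  = cpu_to_le32(lower_32_bits(dma));
}
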
dma               425 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe.h 	dma_addr_t dma;
dma               444 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe.h 	dma_addr_t dma;
dma               464 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe.h 	dma_addr_t dma;
dma               807 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		   (unsigned long long)adapter->tx_ring->dma,
dma               811 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	tdba = adapter->tx_ring->dma;
dma               855 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		   (unsigned long long)adapter->rx_ring->dma,
dma               874 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	rdba = adapter->rx_ring->dma;
dma               890 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		dma_unmap_single(&adapter->pdev->dev, buffer_info->dma,
dma               910 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		dma_unmap_single(&adapter->pdev->dev, buffer_info->dma,
dma               948 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	iowrite32(tx_ring->dma, &hw->reg->TX_DSC_HW_P);
dma               980 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	iowrite32(rx_ring->dma, &hw->reg->RX_DSC_HW_P);
dma              1168 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	buffer_info->dma = dma_map_single(&adapter->pdev->dev, tmp_skb->data,
dma              1171 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	if (dma_mapping_error(&adapter->pdev->dev, buffer_info->dma)) {
dma              1173 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		buffer_info->dma = 0;
dma              1183 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	tx_desc->buffer_addr = (buffer_info->dma);
dma              1193 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	iowrite32(tx_ring->dma +
dma              1380 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		buffer_info->dma = dma_map_single(&pdev->dev,
dma              1384 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		if (dma_mapping_error(&adapter->pdev->dev, buffer_info->dma)) {
dma              1387 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 			buffer_info->dma = 0;
dma              1393 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		rx_desc->buffer_addr = (buffer_info->dma);
dma              1398 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 			   i, (unsigned long long)buffer_info->dma,
dma              1408 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		iowrite32(rx_ring->dma +
dma              1563 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 			dma_unmap_single(&adapter->pdev->dev, buffer_info->dma,
dma              1652 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		dma_unmap_single(&pdev->dev, buffer_info->dma,
dma              1745 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 					   &tx_ring->dma, GFP_KERNEL);
dma              1760 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		   tx_ring->desc, (unsigned long long)tx_ring->dma,
dma              1788 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 						  &rx_ring->dma, GFP_KERNEL);
dma              1801 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 		   rx_ring->desc, (unsigned long long)rx_ring->dma,
dma              1819 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	pci_free_consistent(pdev, tx_ring->size, tx_ring->desc, tx_ring->dma);
dma              1836 drivers/net/ethernet/oki-semi/pch_gbe/pch_gbe_main.c 	pci_free_consistent(pdev, rx_ring->size, rx_ring->desc, rx_ring->dma);
dma               525 drivers/net/ethernet/pasemi/pasemi_mac.c 		if (info->dma && info->skb) {
dma               529 drivers/net/ethernet/pasemi/pasemi_mac.c 						(TX_RING_SIZE-1)].dma;
dma               550 drivers/net/ethernet/pasemi/pasemi_mac.c 		if (info->skb && info->dma) {
dma               552 drivers/net/ethernet/pasemi/pasemi_mac.c 					 info->dma,
dma               557 drivers/net/ethernet/pasemi/pasemi_mac.c 		info->dma = 0;
dma               592 drivers/net/ethernet/pasemi/pasemi_mac.c 		dma_addr_t dma;
dma               603 drivers/net/ethernet/pasemi/pasemi_mac.c 		dma = pci_map_single(mac->dma_pdev, skb->data,
dma               607 drivers/net/ethernet/pasemi/pasemi_mac.c 		if (unlikely(pci_dma_mapping_error(mac->dma_pdev, dma))) {
dma               613 drivers/net/ethernet/pasemi/pasemi_mac.c 		info->dma = dma;
dma               614 drivers/net/ethernet/pasemi/pasemi_mac.c 		*buff = XCT_RXB_LEN(mac->bufsz) | XCT_RXB_ADDR(dma);
dma               705 drivers/net/ethernet/pasemi/pasemi_mac.c 	dma_addr_t dma;
dma               735 drivers/net/ethernet/pasemi/pasemi_mac.c 		dma = (RX_DESC(rx, n+2) & XCT_PTR_ADDR_M);
dma               744 drivers/net/ethernet/pasemi/pasemi_mac.c 		pci_unmap_single(pdev, dma, mac->bufsz - LOCAL_SKB_ALIGN,
dma               756 drivers/net/ethernet/pasemi/pasemi_mac.c 		info->dma = 0;
dma               864 drivers/net/ethernet/pasemi/pasemi_mac.c 		nr_frags = TX_DESC_INFO(txring, i).dma;
dma               878 drivers/net/ethernet/pasemi/pasemi_mac.c 			dmas[descr_count][j] = TX_DESC_INFO(txring, i+1+j).dma;
dma              1502 drivers/net/ethernet/pasemi/pasemi_mac.c 	TX_DESC_INFO(txring, fill).dma = nfrags;
dma              1508 drivers/net/ethernet/pasemi/pasemi_mac.c 		TX_DESC_INFO(txring, fill+i).dma = map[i];
dma                91 drivers/net/ethernet/pasemi/pasemi_mac.h 	dma_addr_t	dma;
dma               525 drivers/net/ethernet/qlogic/netxen/netxen_nic.h 	u64 dma;
dma               553 drivers/net/ethernet/qlogic/netxen/netxen_nic.h 	u64 dma;
dma               106 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 					rx_buf->dma,
dma               126 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 		if (buffrag->dma) {
dma               127 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 			pci_unmap_single(adapter->pdev, buffrag->dma,
dma               129 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 			buffrag->dma = 0ULL;
dma               133 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 			if (buffrag->dma) {
dma               134 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 				pci_unmap_page(adapter->pdev, buffrag->dma,
dma               137 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 				buffrag->dma = 0ULL;
dma              1458 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	dma_addr_t dma;
dma              1470 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	dma = pci_map_single(pdev, skb->data,
dma              1473 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	if (pci_dma_mapping_error(pdev, dma)) {
dma              1480 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	buffer->dma = dma;
dma              1494 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	pci_unmap_single(adapter->pdev, buffer->dma, rds_ring->dma_size,
dma              1757 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 			pci_unmap_single(pdev, frag->dma, frag->length,
dma              1759 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 			frag->dma = 0ULL;
dma              1762 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 				pci_unmap_page(pdev, frag->dma, frag->length,
dma              1764 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 				frag->dma = 0ULL;
dma              1834 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 		pdesc->addr_buffer = cpu_to_le64(buffer->dma);
dma              1896 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 		pdesc->addr_buffer = cpu_to_le64(buffer->dma);
dma              1995 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 	nf->dma = map;
dma              2007 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 		nf->dma = map;
dma              2016 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 		pci_unmap_page(pdev, nf->dma, nf->length, PCI_DMA_TODEVICE);
dma              2017 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 		nf->dma = 0ULL;
dma              2021 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 	pci_unmap_single(pdev, nf->dma, skb_headlen(skb), PCI_DMA_TODEVICE);
dma              2022 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 	nf->dma = 0ULL;
dma              2113 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 			hwdesc->addr_buffer1 = cpu_to_le64(buffrag->dma);
dma              2116 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 			hwdesc->addr_buffer2 = cpu_to_le64(buffrag->dma);
dma              2119 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 			hwdesc->addr_buffer3 = cpu_to_le64(buffrag->dma);
dma              2122 drivers/net/ethernet/qlogic/netxen/netxen_nic_main.c 			hwdesc->addr_buffer4 = cpu_to_le64(buffrag->dma);
dma               348 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h 	u64 dma;
dma               371 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h 	u64 dma;
dma                99 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 					rx_buf->dma,
dma               142 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 		if (buffrag->dma) {
dma               143 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 			pci_unmap_single(adapter->pdev, buffrag->dma,
dma               145 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 			buffrag->dma = 0ULL;
dma               149 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 			if (buffrag->dma) {
dma               150 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 				pci_unmap_page(adapter->pdev, buffrag->dma,
dma               153 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 				buffrag->dma = 0ULL;
dma               596 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	nf->dma = map;
dma               607 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 		nf->dma = map;
dma               616 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 		pci_unmap_page(pdev, nf->dma, nf->length, PCI_DMA_TODEVICE);
dma               620 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	pci_unmap_single(pdev, nf->dma, skb_headlen(skb), PCI_DMA_TODEVICE);
dma               634 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 		pci_unmap_page(pdev, nf->dma, nf->length, PCI_DMA_TODEVICE);
dma               638 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	pci_unmap_single(pdev, nf->dma, skb_headlen(skb), PCI_DMA_TODEVICE);
dma               735 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 			hwdesc->addr_buffer1 = cpu_to_le64(buffrag->dma);
dma               738 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 			hwdesc->addr_buffer2 = cpu_to_le64(buffrag->dma);
dma               741 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 			hwdesc->addr_buffer3 = cpu_to_le64(buffrag->dma);
dma               744 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 			hwdesc->addr_buffer4 = cpu_to_le64(buffrag->dma);
dma               819 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	dma_addr_t dma;
dma               829 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	dma = pci_map_single(pdev, skb->data,
dma               832 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	if (pci_dma_mapping_error(pdev, dma)) {
dma               839 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	buffer->dma = dma;
dma               875 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 		pdesc->addr_buffer = cpu_to_le64(buffer->dma);
dma               907 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 			pci_unmap_single(pdev, frag->dma, frag->length,
dma               909 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 			frag->dma = 0ULL;
dma               912 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 				pci_unmap_page(pdev, frag->dma, frag->length,
dma               914 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 				frag->dma = 0ULL;
dma              1151 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 	pci_unmap_single(adapter->pdev, buffer->dma, ring->dma_size,
dma              1463 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c 		pdesc->addr_buffer = cpu_to_le64(buffer->dma);
dma              1541 drivers/net/ethernet/realtek/8139cp.c 	dma_addr_t dma;
dma              1545 drivers/net/ethernet/realtek/8139cp.c 				       &dma, GFP_KERNEL);
dma              1550 drivers/net/ethernet/realtek/8139cp.c 	cpw32(StatsAddr + 4, (u64)dma >> 32);
dma              1551 drivers/net/ethernet/realtek/8139cp.c 	cpw32(StatsAddr, ((u64)dma & DMA_BIT_MASK(32)) | DumpStats);
dma              1580 drivers/net/ethernet/realtek/8139cp.c 	dma_free_coherent(&cp->pdev->dev, sizeof(*nic_stats), nic_stats, dma);
dma               367 drivers/net/ethernet/samsung/sxgbe/sxgbe_common.h 	const struct sxgbe_dma_ops *dma;
dma               304 drivers/net/ethernet/samsung/sxgbe/sxgbe_ethtool.c 	priv->hw->dma->rx_watchdog(priv->ioaddr, priv->rx_riwt);
dma               823 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->stop_tx_queue(priv->ioaddr, queue_num);
dma               833 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->start_tx_queue(priv->ioaddr, queue_num);
dma               958 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		priv->hw->dma->cha_init(priv->ioaddr, queue_num,
dma               964 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	return priv->hw->dma->init(priv->ioaddr, fixed_burst, burst_map);
dma              1160 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->start_tx(priv->ioaddr, SXGBE_TX_QUEUES);
dma              1161 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->start_rx(priv->ioaddr, SXGBE_RX_QUEUES);
dma              1169 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	if ((priv->use_riwt) && (priv->hw->dma->rx_watchdog)) {
dma              1171 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		priv->hw->dma->rx_watchdog(priv->ioaddr, SXGBE_MAX_DMA_RIWT);
dma              1219 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->stop_tx(priv->ioaddr, SXGBE_TX_QUEUES);
dma              1220 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->stop_rx(priv->ioaddr, SXGBE_RX_QUEUES);
dma              1418 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->enable_dma_transmission(priv->ioaddr, txq_index);
dma              1561 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		priv->hw->dma->enable_dma_irq(priv->ioaddr, qnum);
dma              1627 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	status = priv->hw->dma->tx_dma_int_status(priv->ioaddr, txq->queue_no,
dma              1664 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	status = priv->hw->dma->rx_dma_int_status(priv->ioaddr, rxq->queue_no,
dma              1668 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		priv->hw->dma->disable_dma_irq(priv->ioaddr, rxq->queue_no);
dma              1974 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	ops_ptr->dma		= sxgbe_get_dma_ops();
dma              2118 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 			priv->hw->dma->enable_tso(priv->ioaddr, queue_num);
dma              2209 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->stop_rx(priv->ioaddr, SXGBE_RX_QUEUES);
dma              2210 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	priv->hw->dma->stop_tx(priv->ioaddr, SXGBE_TX_QUEUES);
dma              2070 drivers/net/ethernet/smsc/smc911x.c 	ndev->dma = (unsigned char)-1;
dma               219 drivers/net/ethernet/smsc/smc911x.h 		int reg, struct dma_chan *dma, u_char *buf, int len)
dma               233 drivers/net/ethernet/smsc/smc911x.h 	tx = dmaengine_prep_slave_single(dma, rx_dmabuf, rx_dmalen,
dma               239 drivers/net/ethernet/smsc/smc911x.h 		dma_async_issue_pending(dma);
dma               251 drivers/net/ethernet/smsc/smc911x.h 		int reg, struct dma_chan *dma, u_char *buf, int len)
dma               265 drivers/net/ethernet/smsc/smc911x.h 	tx = dmaengine_prep_slave_single(dma, tx_dmabuf, tx_dmalen,
dma               271 drivers/net/ethernet/smsc/smc911x.h 		dma_async_issue_pending(dma);
dma              2327 drivers/net/ethernet/smsc/smc91x.c 	ndev->dma = (unsigned char)-1;
dma               297 drivers/net/ethernet/smsc/smc91x.h 	smc_pxa_dma_insl(a, lp, r, dev->dma, p, l)
dma               325 drivers/net/ethernet/smsc/smc91x.h smc_pxa_dma_insl(void __iomem *ioaddr, struct smc_local *lp, int reg, int dma,
dma               366 drivers/net/ethernet/smsc/smc91x.h 	smc_pxa_dma_insw(a, lp, r, dev->dma, p, l)
dma               368 drivers/net/ethernet/smsc/smc91x.h smc_pxa_dma_insw(void __iomem *ioaddr, struct smc_local *lp, int reg, int dma,
dma               437 drivers/net/ethernet/stmicro/stmmac/common.h 	const struct stmmac_dma_ops *dma;
dma              1048 drivers/net/ethernet/stmicro/stmmac/dwmac-sun8i.c 	mac->dma = &sun8i_dwmac_dma_ops;
dma               247 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	u32 mtl, dma;
dma               254 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	dma = readl(ioaddr + DMA_SAFETY_INT_STATUS);
dma               256 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	err = (mtl & MCSIS) || (dma & MCSIS);
dma               263 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	err = (mtl & (MEUIS | MECIS)) || (dma & (MSUIS | MSCIS));
dma               264 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	corr = (mtl & MECIS) || (dma & MSCIS);
dma               270 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	err = dma & (DEUIS | DECIS);
dma               271 drivers/net/ethernet/stmicro/stmmac/dwmac5.c 	corr = dma & DECIS;
dma               816 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	u32 mtl, dma;
dma               823 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	dma = readl(ioaddr + XGMAC_DMA_SAFETY_INT_STATUS);
dma               825 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	err = (mtl & XGMAC_MCSIS) || (dma & XGMAC_MCSIS);
dma               833 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	      (dma & (XGMAC_MSUIS | XGMAC_MSCIS));
dma               834 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	corr = (mtl & XGMAC_MECIS) || (dma & XGMAC_MSCIS);
dma               840 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	err = dma & (XGMAC_DEUIS | XGMAC_DECIS);
dma               841 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c 	corr = dma & XGMAC_DECIS;
dma                79 drivers/net/ethernet/stmicro/stmmac/hwif.c 	const void *dma;
dma                99 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwmac100_dma_ops,
dma               117 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwmac1000_dma_ops,
dma               135 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwmac4_dma_ops,
dma               153 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwmac4_dma_ops,
dma               171 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwmac410_dma_ops,
dma               189 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwmac410_dma_ops,
dma               207 drivers/net/ethernet/stmicro/stmmac/hwif.c 		.dma = &dwxgmac210_dma_ops,
dma               273 drivers/net/ethernet/stmicro/stmmac/hwif.c 		mac->dma = mac->dma ? : entry->dma;
dma               214 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_callback(__priv, dma, reset, __args)
dma               216 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, init, __args)
dma               218 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, init_chan, __args)
dma               220 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, init_rx_chan, __args)
dma               222 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, init_tx_chan, __args)
dma               224 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, axi, __args)
dma               226 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, dump_regs, __args)
dma               228 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, dma_rx_mode, __args)
dma               230 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, dma_tx_mode, __args)
dma               232 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, dma_diagnostic_fr, __args)
dma               234 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, enable_dma_transmission, __args)
dma               236 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, enable_dma_irq, __args)
dma               238 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, disable_dma_irq, __args)
dma               240 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, start_tx, __args)
dma               242 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, stop_tx, __args)
dma               244 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, start_rx, __args)
dma               246 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, stop_rx, __args)
dma               248 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_callback(__priv, dma, dma_interrupt, __args)
dma               250 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, get_hw_feature, __args)
dma               252 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, rx_watchdog, __args)
dma               254 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, set_tx_ring_len, __args)
dma               256 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, set_rx_ring_len, __args)
dma               258 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, set_rx_tail_ptr, __args)
dma               260 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, set_tx_tail_ptr, __args)
dma               262 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, enable_tso, __args)
dma               264 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, qmode, __args)
dma               266 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, set_bfsize, __args)
dma               268 drivers/net/ethernet/stmicro/stmmac/hwif.h 	stmmac_do_void_callback(__priv, dma, enable_sph, __args)
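
Both sxgbe (priv->hw->dma->start_tx(...) above) and stmmac route every DMA operation through a table of function pointers chosen at probe time; stmmac additionally wraps each call in its stmmac_do_callback()/stmmac_do_void_callback() macros so a missing op fails gracefully. A condensed sketch of that indirection with invented op, struct, and macro names:

#include <linux/errno.h>
#include <linux/types.h>

struct my_dma_ops {
        int  (*reset)(void __iomem *ioaddr);
        void (*start_tx)(void __iomem *ioaddr, u32 chan);
        void (*stop_tx)(void __iomem *ioaddr, u32 chan);
};

struct my_hw {
        const struct my_dma_ops *dma;
};

struct my_priv {
        struct my_hw    *hw;
        void __iomem    *ioaddr;
};

/* Call an op if the selected table provides it; otherwise report -EINVAL. */
#define my_do_callback(priv, op, args...)                               \
({                                                                      \
        int __ret = -EINVAL;                                            \
        if ((priv)->hw->dma && (priv)->hw->dma->op)                     \
                __ret = (priv)->hw->dma->op(args);                      \
        __ret;                                                          \
})

#define my_do_void_callback(priv, op, args...)                          \
do {                                                                    \
        if ((priv)->hw->dma && (priv)->hw->dma->op)                     \
                (priv)->hw->dma->op(args);                              \
} while (0)

#define my_dma_reset(priv)                                              \
        my_do_callback(priv, reset, (priv)->ioaddr)
#define my_dma_start_tx(priv, chan)                                     \
        my_do_void_callback(priv, start_tx, (priv)->ioaddr, chan)
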
dma              5098 drivers/net/ethernet/sun/cassini.c 	dev->dma = 0;
dma              1189 drivers/net/ethernet/sun/sunbmac.c 	dev->dma = 0;
dma              2984 drivers/net/ethernet/sun/sungem.c 	dev->dma = 0;
dma               898 drivers/net/ethernet/sun/sunqe.c 	dev->dma = 0;
dma              1031 drivers/net/ethernet/tehuti/tehuti.c 		dm->dma = 0;
dma              1035 drivers/net/ethernet/tehuti/tehuti.c 		if (dm->dma) {
dma              1037 drivers/net/ethernet/tehuti/tehuti.c 					 dm->dma, f->m.pktsz,
dma              1100 drivers/net/ethernet/tehuti/tehuti.c 		dm->dma = pci_map_single(priv->pdev,
dma              1107 drivers/net/ethernet/tehuti/tehuti.c 		rxfd->pa_lo = CPU_CHIP_SWAP32(L32_64(dm->dma));
dma              1108 drivers/net/ethernet/tehuti/tehuti.c 		rxfd->pa_hi = CPU_CHIP_SWAP32(H32_64(dm->dma));
dma              1162 drivers/net/ethernet/tehuti/tehuti.c 	rxfd->pa_lo = CPU_CHIP_SWAP32(L32_64(dm->dma));
dma              1163 drivers/net/ethernet/tehuti/tehuti.c 	rxfd->pa_hi = CPU_CHIP_SWAP32(H32_64(dm->dma));
dma              1263 drivers/net/ethernet/tehuti/tehuti.c 						    dm->dma, rxf_fifo->m.pktsz,
dma              1270 drivers/net/ethernet/tehuti/tehuti.c 					 dm->dma, rxf_fifo->m.pktsz,
dma              1493 drivers/net/ethernet/tehuti/tehuti.c 	db->wptr->addr.dma = pci_map_single(priv->pdev, skb->data,
dma              1496 drivers/net/ethernet/tehuti/tehuti.c 	pbl->pa_lo = CPU_CHIP_SWAP32(L32_64(db->wptr->addr.dma));
dma              1497 drivers/net/ethernet/tehuti/tehuti.c 	pbl->pa_hi = CPU_CHIP_SWAP32(H32_64(db->wptr->addr.dma));
dma              1508 drivers/net/ethernet/tehuti/tehuti.c 		db->wptr->addr.dma = skb_frag_dma_map(&priv->pdev->dev, frag,
dma              1514 drivers/net/ethernet/tehuti/tehuti.c 		pbl->pa_lo = CPU_CHIP_SWAP32(L32_64(db->wptr->addr.dma));
dma              1515 drivers/net/ethernet/tehuti/tehuti.c 		pbl->pa_hi = CPU_CHIP_SWAP32(H32_64(db->wptr->addr.dma));
dma              1730 drivers/net/ethernet/tehuti/tehuti.c 			BDX_ASSERT(db->rptr->addr.dma == 0);
dma              1731 drivers/net/ethernet/tehuti/tehuti.c 			pci_unmap_page(priv->pdev, db->rptr->addr.dma,
dma              1780 drivers/net/ethernet/tehuti/tehuti.c 			pci_unmap_page(priv->pdev, db->rptr->addr.dma,
dma               172 drivers/net/ethernet/tehuti/tehuti.h 	u64 dma;
dma               184 drivers/net/ethernet/tehuti/tehuti.h 	dma_addr_t dma;
dma               340 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_int_ctrl(cpsw->dma, true);
dma               349 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_int_ctrl(cpsw->dma, false);
dma               456 drivers/net/ethernet/ti/cpsw.c 	dma_addr_t dma;
dma               466 drivers/net/ethernet/ti/cpsw.c 		dma = page_pool_get_dma_addr(page);
dma               467 drivers/net/ethernet/ti/cpsw.c 		dma += xdpf->headroom + sizeof(struct xdp_frame);
dma               469 drivers/net/ethernet/ti/cpsw.c 					       dma, xdpf->len, port);
dma               686 drivers/net/ethernet/ti/cpsw.c 	dma_addr_t		dma;
dma               779 drivers/net/ethernet/ti/cpsw.c 	dma = page_pool_get_dma_addr(new_page) + CPSW_HEADROOM;
dma               780 drivers/net/ethernet/ti/cpsw.c 	ret = cpdma_chan_submit_mapped(cpsw->rxv[ch].ch, new_page, dma,
dma               878 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_eoi(cpsw->dma, CPDMA_EOI_TX);
dma               894 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_eoi(cpsw->dma, CPDMA_EOI_RX);
dma               913 drivers/net/ethernet/ti/cpsw.c 	ch_map = cpdma_ctrl_txchs_state(cpsw->dma);
dma               963 drivers/net/ethernet/ti/cpsw.c 	ch_map = cpdma_ctrl_rxchs_state(cpsw->dma);
dma              1359 drivers/net/ethernet/ti/cpsw.c 	dma_addr_t dma;
dma              1375 drivers/net/ethernet/ti/cpsw.c 			dma = page_pool_get_dma_addr(page) + CPSW_HEADROOM;
dma              1377 drivers/net/ethernet/ti/cpsw.c 							    page, dma,
dma              1748 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_start(cpsw->dma);
dma              1756 drivers/net/ethernet/ti/cpsw.c 		cpdma_ctlr_stop(cpsw->dma);
dma              1781 drivers/net/ethernet/ti/cpsw.c 		cpdma_ctlr_stop(cpsw->dma);
dma              2232 drivers/net/ethernet/ti/cpsw.c 	min_rate = cpdma_chan_get_min_rate(cpsw->dma);
dma              2859 drivers/net/ethernet/ti/cpsw.c 	cpsw->txv[0].ch = cpdma_chan_create(cpsw->dma, ch, cpsw_tx_handler, 0);
dma              2866 drivers/net/ethernet/ti/cpsw.c 	cpsw->rxv[0].ch = cpdma_chan_create(cpsw->dma, 0, cpsw_rx_handler, 1);
dma              2964 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_destroy(cpsw->dma);
dma              2989 drivers/net/ethernet/ti/cpsw.c 	cpdma_ctlr_destroy(cpsw->dma);
dma               480 drivers/net/ethernet/ti/cpsw_ethtool.c 	cpdma_ctlr_stop(cpsw->dma);
dma               495 drivers/net/ethernet/ti/cpsw_ethtool.c 		cpdma_ctlr_start(cpsw->dma);
dma               552 drivers/net/ethernet/ti/cpsw_ethtool.c 		vec[*ch].ch = cpdma_chan_create(cpsw->dma, vch, handler, rx);
dma               663 drivers/net/ethernet/ti/cpsw_ethtool.c 	ering->tx_pending = cpdma_get_num_tx_descs(cpsw->dma);
dma               665 drivers/net/ethernet/ti/cpsw_ethtool.c 	ering->rx_pending = cpdma_get_num_rx_descs(cpsw->dma);
dma               681 drivers/net/ethernet/ti/cpsw_ethtool.c 	descs_num = cpdma_get_num_rx_descs(cpsw->dma);
dma               687 drivers/net/ethernet/ti/cpsw_ethtool.c 	ret = cpdma_set_num_rx_descs(cpsw->dma, ering->rx_pending);
dma               706 drivers/net/ethernet/ti/cpsw_ethtool.c 	cpdma_set_num_rx_descs(cpsw->dma, descs_num);
dma               119 drivers/net/ethernet/ti/cpsw_priv.c 	cpsw->dma = cpdma_ctlr_create(&dma_params);
dma               120 drivers/net/ethernet/ti/cpsw_priv.c 	if (!cpsw->dma) {
dma               128 drivers/net/ethernet/ti/cpsw_priv.c 		cpdma_ctlr_destroy(cpsw->dma);
dma               338 drivers/net/ethernet/ti/cpsw_priv.h 	struct cpdma_ctlr		*dma;
dma               288 drivers/net/ethernet/ti/davinci_cpdma.c desc_from_phys(struct cpdma_desc_pool *pool, dma_addr_t dma)
dma               290 drivers/net/ethernet/ti/davinci_cpdma.c 	return dma ? pool->iomap + dma - pool->hw_addr : NULL;
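
davinci_cpdma keeps its descriptors in a device-visible pool and converts between the bus address the hardware reports and the CPU-side (typically ioremapped) pointer by plain offset arithmetic, as desc_from_phys() above does. A sketch of both directions for a hypothetical pool structure:

#include <linux/types.h>

struct my_desc_pool {
        void __iomem    *iomap;         /* CPU view of the descriptor area */
        dma_addr_t      hw_addr;        /* bus address of the same area */
        size_t          mem_size;
};

static void __iomem *my_desc_from_phys(struct my_desc_pool *pool,
                                       dma_addr_t dma)
{
        return dma ? pool->iomap + (dma - pool->hw_addr) : NULL;
}

static dma_addr_t my_desc_to_phys(struct my_desc_pool *pool,
                                  void __iomem *desc)
{
        return desc ? pool->hw_addr + (desc - pool->iomap) : 0;
}
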
dma               319 drivers/net/ethernet/ti/davinci_emac.c 	struct cpdma_ctlr *dma;
dma              1467 drivers/net/ethernet/ti/davinci_emac.c 	cpdma_ctlr_start(priv->dma);
dma              1549 drivers/net/ethernet/ti/davinci_emac.c 	cpdma_ctlr_stop(priv->dma);
dma              1577 drivers/net/ethernet/ti/davinci_emac.c 	cpdma_ctlr_stop(priv->dma);
dma              1865 drivers/net/ethernet/ti/davinci_emac.c 	priv->dma = cpdma_ctlr_create(&dma_params);
dma              1866 drivers/net/ethernet/ti/davinci_emac.c 	if (!priv->dma) {
dma              1872 drivers/net/ethernet/ti/davinci_emac.c 	priv->txchan = cpdma_chan_create(priv->dma, EMAC_DEF_TX_CH,
dma              1880 drivers/net/ethernet/ti/davinci_emac.c 	priv->rxchan = cpdma_chan_create(priv->dma, EMAC_DEF_RX_CH,
dma              1948 drivers/net/ethernet/ti/davinci_emac.c 	cpdma_ctlr_destroy(priv->dma);
dma              1977 drivers/net/ethernet/ti/davinci_emac.c 	cpdma_ctlr_destroy(priv->dma);
dma               621 drivers/net/ethernet/ti/netcp_core.c 	dma_addr_t dma;
dma               624 drivers/net/ethernet/ti/netcp_core.c 		dma = knav_queue_pop(netcp->rx_queue, &dma_sz);
dma               625 drivers/net/ethernet/ti/netcp_core.c 		if (!dma)
dma               628 drivers/net/ethernet/ti/netcp_core.c 		desc = knav_pool_desc_unmap(netcp->rx_pool, dma, dma_sz);
dma               793 drivers/net/ethernet/ti/netcp_core.c 	dma_addr_t dma;
dma               797 drivers/net/ethernet/ti/netcp_core.c 	while ((dma = knav_queue_pop(netcp->rx_fdq[fdq], &dma_sz))) {
dma               798 drivers/net/ethernet/ti/netcp_core.c 		desc = knav_pool_desc_unmap(netcp->rx_pool, dma, dma_sz);
dma               804 drivers/net/ethernet/ti/netcp_core.c 		get_org_pkt_info(&dma, &buf_len, desc);
dma               810 drivers/net/ethernet/ti/netcp_core.c 		if (unlikely(!dma)) {
dma               823 drivers/net/ethernet/ti/netcp_core.c 			dma_unmap_single(netcp->dev, dma, buf_len,
dma               827 drivers/net/ethernet/ti/netcp_core.c 			dma_unmap_page(netcp->dev, dma, buf_len,
dma               858 drivers/net/ethernet/ti/netcp_core.c 	dma_addr_t dma;
dma               884 drivers/net/ethernet/ti/netcp_core.c 		dma = dma_map_single(netcp->dev, bufptr, buf_len,
dma               886 drivers/net/ethernet/ti/netcp_core.c 		if (unlikely(dma_mapping_error(netcp->dev, dma)))
dma               901 drivers/net/ethernet/ti/netcp_core.c 		dma = dma_map_page(netcp->dev, page, 0, buf_len, DMA_TO_DEVICE);
dma               915 drivers/net/ethernet/ti/netcp_core.c 	set_org_pkt_info(dma, buf_len, hwdesc);
dma               921 drivers/net/ethernet/ti/netcp_core.c 	knav_pool_desc_map(netcp->rx_pool, hwdesc, sizeof(*hwdesc), &dma,
dma               923 drivers/net/ethernet/ti/netcp_core.c 	knav_queue_push(netcp->rx_fdq[fdq], dma, sizeof(*hwdesc), 0);
dma              1010 drivers/net/ethernet/ti/netcp_core.c 	dma_addr_t dma;
dma              1014 drivers/net/ethernet/ti/netcp_core.c 		dma = knav_queue_pop(netcp->tx_compl_q, &dma_sz);
dma              1015 drivers/net/ethernet/ti/netcp_core.c 		if (!dma)
dma              1017 drivers/net/ethernet/ti/netcp_core.c 		desc = knav_pool_desc_unmap(netcp->tx_pool, dma, dma_sz);
dma              1179 drivers/net/ethernet/ti/netcp_core.c 	dma_addr_t dma;
dma              1250 drivers/net/ethernet/ti/netcp_core.c 	ret = knav_pool_desc_map(netcp->tx_pool, desc, sizeof(*desc), &dma,
dma              1258 drivers/net/ethernet/ti/netcp_core.c 	knav_queue_push(tx_pipe->dma_queue, dma, dma_sz, 0);
dma              1211 drivers/net/ethernet/via/via-rhine.c 	dma_addr_t dma;
dma              1225 drivers/net/ethernet/via/via-rhine.c 	sd->dma = dma_map_single(hwdev, sd->skb->data, size, DMA_FROM_DEVICE);
dma              1226 drivers/net/ethernet/via/via-rhine.c 	if (unlikely(dma_mapping_error(hwdev, sd->dma))) {
dma              1248 drivers/net/ethernet/via/via-rhine.c 	rp->rx_skbuff_dma[entry] = sd->dma;
dma              1251 drivers/net/ethernet/via/via-rhine.c 	rp->rx_ring[entry].addr = cpu_to_le32(sd->dma);
dma               680 drivers/net/fddi/defza.c 	dma_addr_t dma, newdma;
dma               693 drivers/net/fddi/defza.c 		dma = fp->rx_dma[i];
dma               704 drivers/net/fddi/defza.c 					dma +
dma               730 drivers/net/fddi/defza.c 			dma_unmap_single(fp->bdev, dma, FZA_RX_BUFFER_SIZE,
dma               765 drivers/net/fddi/defza.c 			dma = newdma;
dma               767 drivers/net/fddi/defza.c 			fp->rx_dma[i] = dma;
dma               776 drivers/net/fddi/defza.c 		buf = (dma + 0x1000) >> 9;
dma               778 drivers/net/fddi/defza.c 		buf = dma >> 9 | FZA_RING_OWN_FZA;
dma              1153 drivers/net/fddi/defza.c 	dma_addr_t dma;
dma              1163 drivers/net/fddi/defza.c 			dma = dma_map_single(fp->bdev, skb->data,
dma              1166 drivers/net/fddi/defza.c 			if (dma_mapping_error(fp->bdev, dma)) {
dma              1183 drivers/net/fddi/defza.c 		fp->rx_dma[i] = dma;
dma                61 drivers/net/fddi/skfp/h/targethw.h 	short	dma ;			/* DMA channel */
dma              1044 drivers/net/hamradio/baycom_epp.c 		hi.data.mp.dma = dev->dma;
dma              1056 drivers/net/hamradio/baycom_epp.c 		dev->dma = /*hi.data.mp.dma*/0;
dma               162 drivers/net/hamradio/dmascc.c 	int dma;		/* -1 (disable), 0, 1, 3 */
dma               568 drivers/net/hamradio/dmascc.c 		priv->param.dma = -1;
dma               728 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0) {
dma               729 drivers/net/hamradio/dmascc.c 		if (request_dma(priv->param.dma, "dmascc")) {
dma               735 drivers/net/hamradio/dmascc.c 			clear_dma_ff(priv->param.dma);
dma               792 drivers/net/hamradio/dmascc.c 		if (priv->param.dma >= 0) {
dma               872 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0) {
dma               875 drivers/net/hamradio/dmascc.c 		free_dma(priv->param.dma);
dma               972 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0) {
dma               976 drivers/net/hamradio/dmascc.c 		set_dma_mode(priv->param.dma, DMA_MODE_WRITE);
dma               977 drivers/net/hamradio/dmascc.c 		set_dma_addr(priv->param.dma,
dma               979 drivers/net/hamradio/dmascc.c 		set_dma_count(priv->param.dma,
dma               986 drivers/net/hamradio/dmascc.c 			outb((priv->param.dma ==
dma               998 drivers/net/hamradio/dmascc.c 		enable_dma(priv->param.dma);
dma              1020 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0) {
dma              1023 drivers/net/hamradio/dmascc.c 		set_dma_mode(priv->param.dma, DMA_MODE_READ);
dma              1024 drivers/net/hamradio/dmascc.c 		set_dma_addr(priv->param.dma,
dma              1026 drivers/net/hamradio/dmascc.c 		set_dma_count(priv->param.dma, BUF_SIZE);
dma              1028 drivers/net/hamradio/dmascc.c 		enable_dma(priv->param.dma);
dma              1031 drivers/net/hamradio/dmascc.c 			outb((priv->param.dma ==
dma              1055 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0 && priv->type == TYPE_TWIN)
dma              1060 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0)
dma              1061 drivers/net/hamradio/dmascc.c 		disable_dma(priv->param.dma);
dma              1158 drivers/net/hamradio/dmascc.c 	if (priv->param.dma >= 0) {
dma              1192 drivers/net/hamradio/dmascc.c 		if (priv->param.dma < 0)
dma              1196 drivers/net/hamradio/dmascc.c 		if (priv->param.dma >= 0) {
dma              1198 drivers/net/hamradio/dmascc.c 			cb = BUF_SIZE - get_dma_residue(priv->param.dma) -
dma              1235 drivers/net/hamradio/dmascc.c 		if (priv->param.dma >= 0) {
dma              1237 drivers/net/hamradio/dmascc.c 			set_dma_addr(priv->param.dma,
dma              1239 drivers/net/hamradio/dmascc.c 			set_dma_count(priv->param.dma, BUF_SIZE);
dma              1325 drivers/net/hamradio/dmascc.c 		if (priv->param.dma >= 0) {
dma              1326 drivers/net/hamradio/dmascc.c 			disable_dma(priv->param.dma);
dma              1328 drivers/net/hamradio/dmascc.c 			res = get_dma_residue(priv->param.dma);
dma              1335 drivers/net/hamradio/dmascc.c 		if (priv->param.dma >= 0 && priv->type == TYPE_TWIN)
dma               527 drivers/net/hamradio/hdlcdrv.c 		bi.data.mp.dma = dev->dma;
dma               539 drivers/net/hamradio/hdlcdrv.c 		dev->dma = bi.data.mp.dma;
dma               684 drivers/net/hamradio/hdlcdrv.c 				    unsigned int dma) 
dma               707 drivers/net/hamradio/hdlcdrv.c 	dev->dma = dma;
dma               408 drivers/net/vmxnet3/vmxnet3_int.h #define VMXNET3_GET_ADDR_LO(dma)   ((u32)(dma))
dma               409 drivers/net/vmxnet3/vmxnet3_int.h #define VMXNET3_GET_ADDR_HI(dma)   ((u32)(((u64)(dma)) >> 32))
dma               135 drivers/net/wan/cosa.c 	unsigned short irq, dma;	/* IRQ and DMA number */
dma               210 drivers/net/wan/cosa.c static int dma[MAX_CARDS+1] = { 1, 7, 1, 7, 1, 7, 1, 7, 0, };
dma               213 drivers/net/wan/cosa.c static int dma[MAX_CARDS+1];
dma               226 drivers/net/wan/cosa.c module_param_hw_array(dma, int, dma, NULL, 0);
dma               227 drivers/net/wan/cosa.c MODULE_PARM_DESC(dma, "The DMA channels of the COSA or SRP cards");
dma               257 drivers/net/wan/cosa.c static int cosa_probe(int ioaddr, int irq, int dma);
dma               367 drivers/net/wan/cosa.c 		cosa_probe(io[i], irq[i], dma[i]);
dma               412 drivers/net/wan/cosa.c 		free_dma(cosa->dma);
dma               427 drivers/net/wan/cosa.c static int cosa_probe(int base, int irq, int dma)
dma               447 drivers/net/wan/cosa.c 	if (dma < 0 || dma == 4 || dma > 7) {
dma               448 drivers/net/wan/cosa.c 		pr_info("invalid DMA %d\n", dma);
dma               453 drivers/net/wan/cosa.c 	if (((base & 0x8) && dma < 4) || (!(base & 0x8) && dma > 3)) {
dma               455 drivers/net/wan/cosa.c 			base, dma);
dma               459 drivers/net/wan/cosa.c 	cosa->dma = dma;
dma               535 drivers/net/wan/cosa.c 	if (request_dma(cosa->dma, cosa->type)) {
dma               577 drivers/net/wan/cosa.c 		chan->netdev->dma = chan->cosa->dma;
dma               589 drivers/net/wan/cosa.c 		cosa->datareg, cosa->irq, cosa->dma, cosa->nchannels);
dma               602 drivers/net/wan/cosa.c 	free_dma(cosa->dma);
dma              1338 drivers/net/wan/cosa.c 	disable_dma(cosa->dma);
dma              1339 drivers/net/wan/cosa.c 	clear_dma_ff(cosa->dma);
dma              1773 drivers/net/wan/cosa.c 		set_dma_mode(cosa->dma, DMA_MODE_CASCADE);
dma              1774 drivers/net/wan/cosa.c 		enable_dma(cosa->dma);
dma              1779 drivers/net/wan/cosa.c 		disable_dma(cosa->dma);
dma              1780 drivers/net/wan/cosa.c 		clear_dma_ff(cosa->dma);
dma              1781 drivers/net/wan/cosa.c 		set_dma_mode(cosa->dma, DMA_MODE_WRITE);
dma              1782 drivers/net/wan/cosa.c 		set_dma_addr(cosa->dma, virt_to_bus(cosa->txbuf));
dma              1783 drivers/net/wan/cosa.c 		set_dma_count(cosa->dma, cosa->txsize);
dma              1784 drivers/net/wan/cosa.c 		enable_dma(cosa->dma);
dma              1858 drivers/net/wan/cosa.c 	disable_dma(cosa->dma);
dma              1859 drivers/net/wan/cosa.c 	clear_dma_ff(cosa->dma);
dma              1860 drivers/net/wan/cosa.c 	set_dma_mode(cosa->dma, DMA_MODE_READ);
dma              1862 drivers/net/wan/cosa.c 		set_dma_addr(cosa->dma, virt_to_bus(cosa->rxbuf));
dma              1864 drivers/net/wan/cosa.c 		set_dma_addr(cosa->dma, virt_to_bus(cosa->bouncebuf));
dma              1866 drivers/net/wan/cosa.c 	set_dma_count(cosa->dma, (cosa->rxsize&0x1fff));
dma              1867 drivers/net/wan/cosa.c 	enable_dma(cosa->dma);
dma              1886 drivers/net/wan/cosa.c 	disable_dma(cosa->dma);
dma              1887 drivers/net/wan/cosa.c 	clear_dma_ff(cosa->dma);
dma               881 drivers/net/wan/farsync.c fst_rx_dma(struct fst_card_info *card, dma_addr_t dma, u32 mem, int len)
dma               887 drivers/net/wan/farsync.c 	dbg(DBG_RX, "In fst_rx_dma %x %x %d\n", (u32)dma, mem, len);
dma               892 drivers/net/wan/farsync.c 	outl(dma, card->pci_conf + DMAPADR0);	/* Copy to here */
dma               908 drivers/net/wan/farsync.c fst_tx_dma(struct fst_card_info *card, dma_addr_t dma, u32 mem, int len)
dma               914 drivers/net/wan/farsync.c 	dbg(DBG_TX, "In fst_tx_dma %x %x %d\n", (u32)dma, mem, len);
dma               919 drivers/net/wan/farsync.c 	outl(dma, card->pci_conf + DMAPADR1);	/* Copy from here */
dma                45 drivers/net/wan/hostess_sv11.c static int dma;
dma                87 drivers/net/wan/hostess_sv11.c 	switch (dma) {
dma               104 drivers/net/wan/hostess_sv11.c 		switch (dma) {
dma               138 drivers/net/wan/hostess_sv11.c 	switch (dma) {
dma               234 drivers/net/wan/hostess_sv11.c 	if (dma) {
dma               245 drivers/net/wan/hostess_sv11.c 		if (dma == 1)
dma               296 drivers/net/wan/hostess_sv11.c 	if (dma == 1)
dma               299 drivers/net/wan/hostess_sv11.c 	if (dma)
dma               315 drivers/net/wan/hostess_sv11.c 	if (dma) {
dma               316 drivers/net/wan/hostess_sv11.c 		if (dma == 1)
dma               330 drivers/net/wan/hostess_sv11.c module_param_hw(dma, int, dma, 0);
dma               331 drivers/net/wan/hostess_sv11.c MODULE_PARM_DESC(dma, "Set this to 1 to use DMA1/DMA3 for TX/RX");
dma               363 drivers/net/wan/sealevel.c module_param_hw(txdma, int, dma, 0);
dma               365 drivers/net/wan/sealevel.c module_param_hw(rxdma, int, dma, 0);
dma               798 drivers/net/wireless/ath/ath5k/pcu.c 	unsigned int nbtt, atim, dma;
dma               802 drivers/net/wireless/ath/ath5k/pcu.c 	dma = ath5k_hw_reg_read(ah, AR5K_TIMER1) >> 3;
dma               809 drivers/net/wireless/ath/ath5k/pcu.c 	    ath5k_check_timer_win(dma, nbtt, AR5K_TUNE_DMA_BEACON_RESP,
dma               134 drivers/net/wireless/ath/wil6210/debugfs.c 				seq_printf(s, "%c", (d->dma.status & BIT(0)) ?
dma               173 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.addr.addr_low =
dma               175 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.addr.addr_high =
dma               177 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.status = 0; /* 0 = HW_OWNED */
dma               178 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.length = cpu_to_le16(descriptor_size);
dma               179 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.d0 = BIT(9) | RX_DMA_D0_CMD_DMA_IT;
dma               176 drivers/net/wireless/ath/wil6210/trace.h 		__entry->len = d->dma.length;
dma               172 drivers/net/wireless/ath/wil6210/txrx.c 		_d->dma.status = TX_DMA_STATUS_DU;
dma               185 drivers/net/wireless/ath/wil6210/txrx.c 	dma_addr_t pa = wil_desc_addr(&d->dma.addr);
dma               186 drivers/net/wireless/ath/wil6210/txrx.c 	u16 dmalen = le16_to_cpu(d->dma.length);
dma               248 drivers/net/wireless/ath/wil6210/txrx.c 			pa = wil_desc_addr(&d->dma.addr);
dma               249 drivers/net/wireless/ath/wil6210/txrx.c 			dmalen = le16_to_cpu(d->dma.length);
dma               295 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 = RX_DMA_D0_CMD_DMA_RT | RX_DMA_D0_CMD_DMA_IT;
dma               296 drivers/net/wireless/ath/wil6210/txrx.c 	wil_desc_addr_set(&d->dma.addr, pa);
dma               300 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.status = 0; /* BIT(0) should be 0 for HW_OWNED */
dma               301 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.length = cpu_to_le16(sz);
dma               352 drivers/net/wireless/ath/wil6210/txrx.c 	if (d->dma.status & RX_DMA_STATUS_ERROR)
dma               369 drivers/net/wireless/ath/wil6210/txrx.c 	if (_d->dma.status & RX_DMA_STATUS_DU)
dma               478 drivers/net/wireless/ath/wil6210/txrx.c 	if (unlikely(!(_d->dma.status & RX_DMA_STATUS_DU))) {
dma               492 drivers/net/wireless/ath/wil6210/txrx.c 	pa = wil_desc_addr(&d->dma.addr);
dma               495 drivers/net/wireless/ath/wil6210/txrx.c 	dmalen = le16_to_cpu(d->dma.length);
dma               583 drivers/net/wireless/ath/wil6210/txrx.c 	if (likely(d->dma.status & RX_DMA_STATUS_L4I)) {
dma               585 drivers/net/wireless/ath/wil6210/txrx.c 		if (likely((d->dma.error & RX_DMA_ERROR_L4_ERR) == 0))
dma               708 drivers/net/wireless/ath/wil6210/txrx.c 	if ((d->dma.status & RX_DMA_STATUS_ERROR) &&
dma               709 drivers/net/wireless/ath/wil6210/txrx.c 	    (d->dma.error & RX_DMA_ERROR_MIC)) {
dma              1131 drivers/net/wireless/ath/wil6210/txrx.c 	wil_desc_addr_set(&d->dma.addr, pa);
dma              1132 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.ip_length = 0;
dma              1134 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 = 0/*14 | BIT(7)*/;
dma              1135 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.error = 0;
dma              1136 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.status = 0; /* BIT(0) should be 0 for HW_OWNED */
dma              1137 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.length = cpu_to_le16((u16)len);
dma              1138 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 = (vring_index << DMA_CFG_DESC_TX_0_QID_POS);
dma              1642 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 = ETH_HLEN; /* MAC header length */
dma              1643 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 |= is_ipv4 << DMA_CFG_DESC_TX_OFFLOAD_CFG_L3T_IPV4_POS;
dma              1645 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (2 << DMA_CFG_DESC_TX_0_L4_TYPE_POS);
dma              1647 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (tcp_hdr_len & DMA_CFG_DESC_TX_0_L4_LENGTH_MSK);
dma              1650 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (BIT(DMA_CFG_DESC_TX_0_TCP_SEG_EN_POS)) |
dma              1652 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (is_ipv4 << DMA_CFG_DESC_TX_0_IPV4_CHECKSUM_EN_POS);
dma              1654 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.ip_length = skb_net_hdr_len;
dma              1656 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_TCP_UDP_CHECKSUM_EN_POS);
dma              1658 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_PSEUDO_HEADER_CALC_EN_POS);
dma              1678 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 = ETH_HLEN; /* MAC header length */
dma              1683 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.b11 |= BIT(DMA_CFG_DESC_TX_OFFLOAD_CFG_L3T_IPV4_POS);
dma              1694 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.d0 |= (2 << DMA_CFG_DESC_TX_0_L4_TYPE_POS);
dma              1696 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.d0 |=
dma              1701 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.d0 |=
dma              1708 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.ip_length = skb_network_header_len(skb);
dma              1710 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_TCP_UDP_CHECKSUM_EN_POS);
dma              1712 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_PSEUDO_HEADER_CALC_EN_POS);
dma              1719 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_EOP_POS) |
dma              1726 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= wil_tso_type_lst <<
dma              2023 drivers/net/wireless/ath/wil6210/txrx.c 		_desc->dma.status = TX_DMA_STATUS_DU;
dma              2121 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_EOP_POS);
dma              2122 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_MARK_WB_POS);
dma              2123 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_DMA_IT_POS);
dma              2180 drivers/net/wireless/ath/wil6210/txrx.c 		_d->dma.status = TX_DMA_STATUS_DU;
dma              2470 drivers/net/wireless/ath/wil6210/txrx.c 		if (unlikely(!(_d->dma.status & TX_DMA_STATUS_DU)))
dma              2485 drivers/net/wireless/ath/wil6210/txrx.c 			dmalen = le16_to_cpu(d->dma.length);
dma              2487 drivers/net/wireless/ath/wil6210/txrx.c 					      d->dma.error);
dma              2491 drivers/net/wireless/ath/wil6210/txrx.c 				     d->dma.status, d->dma.error);
dma              2500 drivers/net/wireless/ath/wil6210/txrx.c 				if (likely(d->dma.error == 0)) {
dma              2519 drivers/net/wireless/ath/wil6210/txrx.c 				wil_consume_skb(skb, d->dma.error == 0);
dma               478 drivers/net/wireless/ath/wil6210/txrx.h 	struct vring_tx_dma dma;
dma               488 drivers/net/wireless/ath/wil6210/txrx.h 	struct vring_rx_dma dma;
dma                43 drivers/net/wireless/ath/wil6210/txrx_edma.c 	dma_addr_t pa = wil_tx_desc_get_addr_edma(&d->dma);
dma                44 drivers/net/wireless/ath/wil6210/txrx_edma.c 	u16 dmalen = le16_to_cpu(d->dma.length);
dma               212 drivers/net/wireless/ath/wil6210/txrx_edma.c 	wil_desc_set_addr_edma(&d->dma.addr, &d->dma.addr_high_high, pa);
dma               213 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.length = cpu_to_le16(sz);
dma              1129 drivers/net/wireless/ath/wil6210/txrx_edma.c 	wil_desc_set_addr_edma(&d->dma.addr, &d->dma.addr_high_high, pa);
dma              1132 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.length = cpu_to_le16((u16)len);
dma              1238 drivers/net/wireless/ath/wil6210/txrx_edma.c 			dmalen = le16_to_cpu(d->dma.length);
dma              1335 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.l4_hdr_len |= tcp_hdr_len & DMA_CFG_DESC_TX_0_L4_LENGTH_MSK;
dma              1339 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.cmd |= BIT(WIL_EDMA_DESC_TX_CFG_EOP_POS) |
dma              1345 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.w1 |= BIT(WIL_EDMA_DESC_TX_CFG_PSEUDO_HEADER_CALC_EN_POS) |
dma              1348 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.ip_length |= skb_net_hdr_len;
dma              1350 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.b11 |= ETH_HLEN |
dma               121 drivers/net/wireless/ath/wil6210/txrx_edma.h 	struct wil_ring_rx_enhanced_dma dma;
dma               195 drivers/net/wireless/ath/wil6210/txrx_edma.h 	struct wil_ring_tx_enhanced_dma dma;
dma               591 drivers/net/wireless/ath/wil6210/txrx_edma.h dma_addr_t wil_tx_desc_get_addr_edma(struct wil_ring_tx_enhanced_dma *dma)
dma               593 drivers/net/wireless/ath/wil6210/txrx_edma.h 	return le32_to_cpu(dma->addr.addr_low) |
dma               594 drivers/net/wireless/ath/wil6210/txrx_edma.h 			   ((u64)le16_to_cpu(dma->addr.addr_high) << 32) |
dma               595 drivers/net/wireless/ath/wil6210/txrx_edma.h 			   ((u64)le16_to_cpu(dma->addr_high_high) << 48);
dma               599 drivers/net/wireless/ath/wil6210/txrx_edma.h dma_addr_t wil_rx_desc_get_addr_edma(struct wil_ring_rx_enhanced_dma *dma)
dma               601 drivers/net/wireless/ath/wil6210/txrx_edma.h 	return le32_to_cpu(dma->addr.addr_low) |
dma               602 drivers/net/wireless/ath/wil6210/txrx_edma.h 			   ((u64)le16_to_cpu(dma->addr.addr_high) << 32) |
dma               603 drivers/net/wireless/ath/wil6210/txrx_edma.h 			   ((u64)le16_to_cpu(dma->addr_high_high) << 48);
dma               849 drivers/net/wireless/broadcom/b43/b43.h 		struct b43_dma dma;
dma                37 drivers/net/wireless/broadcom/b43/dma.c static u32 b43_dma_address(struct b43_dma *dma, dma_addr_t dmaaddr,
dma                45 drivers/net/wireless/broadcom/b43/dma.c 		if (dma->translation_in_low) {
dma                47 drivers/net/wireless/broadcom/b43/dma.c 			addr |= dma->translation;
dma                52 drivers/net/wireless/broadcom/b43/dma.c 		if (!dma->translation_in_low) {
dma                54 drivers/net/wireless/broadcom/b43/dma.c 			addr |= dma->translation;
dma                58 drivers/net/wireless/broadcom/b43/dma.c 		if (dma->translation_in_low)
dma                99 drivers/net/wireless/broadcom/b43/dma.c 	addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW);
dma               100 drivers/net/wireless/broadcom/b43/dma.c 	addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT);
dma               191 drivers/net/wireless/broadcom/b43/dma.c 	addrlo = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW);
dma               192 drivers/net/wireless/broadcom/b43/dma.c 	addrhi = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_HIGH);
dma               193 drivers/net/wireless/broadcom/b43/dma.c 	addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT);
dma               677 drivers/net/wireless/broadcom/b43/dma.c 	bool parity = ring->dev->dma.parity;
dma               684 drivers/net/wireless/broadcom/b43/dma.c 			addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
dma               685 drivers/net/wireless/broadcom/b43/dma.c 			addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
dma               686 drivers/net/wireless/broadcom/b43/dma.c 			addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH);
dma               698 drivers/net/wireless/broadcom/b43/dma.c 			addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
dma               699 drivers/net/wireless/broadcom/b43/dma.c 			addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
dma               715 drivers/net/wireless/broadcom/b43/dma.c 			addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
dma               716 drivers/net/wireless/broadcom/b43/dma.c 			addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
dma               717 drivers/net/wireless/broadcom/b43/dma.c 			addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH);
dma               732 drivers/net/wireless/broadcom/b43/dma.c 			addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT);
dma               733 drivers/net/wireless/broadcom/b43/dma.c 			addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW);
dma              1012 drivers/net/wireless/broadcom/b43/dma.c #define destroy_ring(dma, ring) do {				\
dma              1013 drivers/net/wireless/broadcom/b43/dma.c 	b43_destroy_dmaring((dma)->ring, __stringify(ring));	\
dma              1014 drivers/net/wireless/broadcom/b43/dma.c 	(dma)->ring = NULL;					\
dma              1019 drivers/net/wireless/broadcom/b43/dma.c 	struct b43_dma *dma;
dma              1023 drivers/net/wireless/broadcom/b43/dma.c 	dma = &dev->dma;
dma              1025 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, rx_ring);
dma              1026 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_BK);
dma              1027 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_BE);
dma              1028 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_VI);
dma              1029 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_VO);
dma              1030 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_mcast);
dma              1054 drivers/net/wireless/broadcom/b43/dma.c 	struct b43_dma *dma = &dev->dma;
dma              1068 drivers/net/wireless/broadcom/b43/dma.c 		dma->translation = bcma_core_dma_translation(dev->dev->bdev);
dma              1073 drivers/net/wireless/broadcom/b43/dma.c 		dma->translation = ssb_dma_translation(dev->dev->sdev);
dma              1077 drivers/net/wireless/broadcom/b43/dma.c 	dma->translation_in_low = b43_dma_translation_in_low_word(dev, type);
dma              1079 drivers/net/wireless/broadcom/b43/dma.c 	dma->parity = true;
dma              1083 drivers/net/wireless/broadcom/b43/dma.c 		dma->parity = false;
dma              1088 drivers/net/wireless/broadcom/b43/dma.c 	dma->tx_ring_AC_BK = b43_setup_dmaring(dev, 0, 1, type);
dma              1089 drivers/net/wireless/broadcom/b43/dma.c 	if (!dma->tx_ring_AC_BK)
dma              1092 drivers/net/wireless/broadcom/b43/dma.c 	dma->tx_ring_AC_BE = b43_setup_dmaring(dev, 1, 1, type);
dma              1093 drivers/net/wireless/broadcom/b43/dma.c 	if (!dma->tx_ring_AC_BE)
dma              1096 drivers/net/wireless/broadcom/b43/dma.c 	dma->tx_ring_AC_VI = b43_setup_dmaring(dev, 2, 1, type);
dma              1097 drivers/net/wireless/broadcom/b43/dma.c 	if (!dma->tx_ring_AC_VI)
dma              1100 drivers/net/wireless/broadcom/b43/dma.c 	dma->tx_ring_AC_VO = b43_setup_dmaring(dev, 3, 1, type);
dma              1101 drivers/net/wireless/broadcom/b43/dma.c 	if (!dma->tx_ring_AC_VO)
dma              1104 drivers/net/wireless/broadcom/b43/dma.c 	dma->tx_ring_mcast = b43_setup_dmaring(dev, 4, 1, type);
dma              1105 drivers/net/wireless/broadcom/b43/dma.c 	if (!dma->tx_ring_mcast)
dma              1109 drivers/net/wireless/broadcom/b43/dma.c 	dma->rx_ring = b43_setup_dmaring(dev, 0, 0, type);
dma              1110 drivers/net/wireless/broadcom/b43/dma.c 	if (!dma->rx_ring)
dma              1123 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_mcast);
dma              1125 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_VO);
dma              1127 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_VI);
dma              1129 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_BE);
dma              1131 drivers/net/wireless/broadcom/b43/dma.c 	destroy_ring(dma, tx_ring_AC_BK);
dma              1159 drivers/net/wireless/broadcom/b43/dma.c 	struct b43_dma *dma = &dev->dma;
dma              1164 drivers/net/wireless/broadcom/b43/dma.c 		ring = dma->tx_ring_AC_BK;
dma              1167 drivers/net/wireless/broadcom/b43/dma.c 		ring = dma->tx_ring_AC_BE;
dma              1170 drivers/net/wireless/broadcom/b43/dma.c 		ring = dma->tx_ring_AC_VI;
dma              1173 drivers/net/wireless/broadcom/b43/dma.c 		ring = dma->tx_ring_AC_VO;
dma              1176 drivers/net/wireless/broadcom/b43/dma.c 		ring = dma->tx_ring_mcast;
dma              1322 drivers/net/wireless/broadcom/b43/dma.c 			ring = dev->dma.tx_ring_AC_VO;
dma              1325 drivers/net/wireless/broadcom/b43/dma.c 			ring = dev->dma.tx_ring_AC_VI;
dma              1328 drivers/net/wireless/broadcom/b43/dma.c 			ring = dev->dma.tx_ring_AC_BE;
dma              1331 drivers/net/wireless/broadcom/b43/dma.c 			ring = dev->dma.tx_ring_AC_BK;
dma              1335 drivers/net/wireless/broadcom/b43/dma.c 		ring = dev->dma.tx_ring_AC_BE;
dma              1350 drivers/net/wireless/broadcom/b43/dma.c 		ring = dev->dma.tx_ring_mcast;
dma              1722 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_BK);
dma              1723 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_BE);
dma              1724 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_VI);
dma              1725 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_VO);
dma              1726 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_suspend_ring(dev->dma.tx_ring_mcast);
dma              1731 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_resume_ring(dev->dma.tx_ring_mcast);
dma              1732 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_VO);
dma              1733 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_VI);
dma              1734 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_BE);
dma              1735 drivers/net/wireless/broadcom/b43/dma.c 	b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_BK);
dma              2025 drivers/net/wireless/broadcom/b43/main.c 		b43_dma_handle_rx_overflow(dev->dma.rx_ring);
dma              2031 drivers/net/wireless/broadcom/b43/main.c 			b43_dma_rx(dev->dma.rx_ring);
dma               698 drivers/net/wireless/broadcom/b43legacy/b43legacy.h 		struct b43legacy_dma dma;
dma                62 drivers/net/wireless/broadcom/b43legacy/dma.c 	addr |= ring->dev->dma.translation;
dma               186 drivers/net/wireless/broadcom/b43legacy/dma.c return dev->dma.tx_ring1;
dma               194 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dev->dma.tx_ring3;
dma               197 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dev->dma.tx_ring2;
dma               200 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dev->dma.tx_ring1;
dma               203 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dev->dma.tx_ring0;
dma               206 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dev->dma.tx_ring4;
dma               209 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dev->dma.tx_ring5;
dma               532 drivers/net/wireless/broadcom/b43legacy/dma.c 	u32 trans = ring->dev->dma.translation;
dma               751 drivers/net/wireless/broadcom/b43legacy/dma.c 	struct b43legacy_dma *dma;
dma               755 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma = &dev->dma;
dma               757 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->rx_ring3);
dma               758 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->rx_ring3 = NULL;
dma               759 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->rx_ring0);
dma               760 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->rx_ring0 = NULL;
dma               762 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring5);
dma               763 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring5 = NULL;
dma               764 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring4);
dma               765 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring4 = NULL;
dma               766 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring3);
dma               767 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring3 = NULL;
dma               768 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring2);
dma               769 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring2 = NULL;
dma               770 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring1);
dma               771 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring1 = NULL;
dma               772 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring0);
dma               773 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring0 = NULL;
dma               778 drivers/net/wireless/broadcom/b43legacy/dma.c 	struct b43legacy_dma *dma = &dev->dma;
dma               796 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->translation = ssb_dma_translation(dev->dev);
dma               803 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring0 = ring;
dma               808 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring1 = ring;
dma               813 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring2 = ring;
dma               818 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring3 = ring;
dma               823 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring4 = ring;
dma               828 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring5 = ring;
dma               834 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->rx_ring0 = ring;
dma               840 drivers/net/wireless/broadcom/b43legacy/dma.c 		dma->rx_ring3 = ring;
dma               849 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->rx_ring0);
dma               850 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->rx_ring0 = NULL;
dma               852 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring5);
dma               853 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring5 = NULL;
dma               855 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring4);
dma               856 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring4 = NULL;
dma               858 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring3);
dma               859 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring3 = NULL;
dma               861 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring2);
dma               862 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring2 = NULL;
dma               864 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring1);
dma               865 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring1 = NULL;
dma               867 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_destroy_dmaring(dma->tx_ring0);
dma               868 drivers/net/wireless/broadcom/b43legacy/dma.c 	dma->tx_ring0 = NULL;
dma               915 drivers/net/wireless/broadcom/b43legacy/dma.c 	struct b43legacy_dma *dma = &dev->dma;
dma               920 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dma->tx_ring0;
dma               923 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dma->tx_ring1;
dma               926 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dma->tx_ring2;
dma               929 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dma->tx_ring3;
dma               932 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dma->tx_ring4;
dma               935 drivers/net/wireless/broadcom/b43legacy/dma.c 		ring = dma->tx_ring5;
dma              1374 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring0);
dma              1375 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring1);
dma              1376 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring2);
dma              1377 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring3);
dma              1378 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring4);
dma              1379 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring5);
dma              1384 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_resume_ring(dev->dma.tx_ring5);
dma              1385 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_resume_ring(dev->dma.tx_ring4);
dma              1386 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_resume_ring(dev->dma.tx_ring3);
dma              1387 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_resume_ring(dev->dma.tx_ring2);
dma              1388 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_resume_ring(dev->dma.tx_ring1);
dma              1389 drivers/net/wireless/broadcom/b43legacy/dma.c 	b43legacy_dma_tx_resume_ring(dev->dma.tx_ring0);
dma              1353 drivers/net/wireless/broadcom/b43legacy/main.c 			b43legacy_dma_rx(dev->dma.rx_ring0);
dma              1361 drivers/net/wireless/broadcom/b43legacy/main.c 			b43legacy_dma_rx(dev->dma.rx_ring3);
dma               209 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_pub dma; /* exported structure */
dma               355 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	dmactrlflags = di->dma.dmactrlflags;
dma               378 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	di->dma.dmactrlflags = dmactrlflags;
dma               583 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 		      di->dma.dmactrlflags, ntxd, nrxd, rxbufsize,
dma               739 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	if (di->dma.dmactrlflags & DMA_CTRL_PEN) {
dma               749 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma               824 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	uint dmactrlflags = di->dma.dmactrlflags;
dma               845 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma               927 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma               975 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 		if ((di->dma.dmactrlflags & DMA_CTRL_RXMULTI) == 0) {
dma               982 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 			di->dma.rxgiants++;
dma              1025 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1068 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 			di->dma.rxnobuf++;
dma              1111 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1131 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1134 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 		return (unsigned long)&(di->dma.txavail);
dma              1142 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1151 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	di->dma.txavail = di->ntxd - 1;
dma              1162 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	if ((di->dma.dmactrlflags & DMA_CTRL_PEN) == 0)
dma              1175 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1187 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1199 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1209 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1223 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 		if (!(di->dma.dmactrlflags & DMA_CTRL_UNFRAMED))
dma              1230 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1257 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1369 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	di->dma.txavail = di->ntxd - ntxdactive(di, di->txin, di->txout) -
dma              1382 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1392 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	if (di->dma.txavail == 0 || nexttxd(di, di->txout) == di->txin)
dma              1413 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 		    di->dma.txavail == 0 || dma64_txidle(di))
dma              1425 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	di->dma.txavail = 0;
dma              1426 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	di->dma.txnobuf++;
dma              1432 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1441 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1451 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1470 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(pub, struct dma_info, dma);
dma              1552 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c 	struct dma_info *di = container_of(dmah, struct dma_info, dma);
dma               838 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	struct dma_pub *dma = NULL;
dma               874 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	dma = wlc->hw->di[queue];
dma              1016 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	if (dma && queue < NFIFO) {
dma              1018 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 		if (dma->txavail > TX_HEADROOM && queue < TX_BCMC_FIFO &&
dma              1021 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 		dma_kick_tx(dma);
dma              6852 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	struct dma_pub *dma;
dma              6858 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	dma = wlc->hw->di[fifo];
dma              6861 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	if (dma->txavail == 0) {
dma              6923 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	struct dma_pub *dma = wlc->hw->di[fifo];
dma              6927 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	ret = dma_txfast(wlc, dma, p);
dma              6937 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	if (dma->txavail <= TX_HEADROOM && fifo < TX_BCMC_FIFO &&
dma              1917 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	ptr->addr = dma_alloc_coherent(&il->pci_dev->dev, size, &ptr->dma,
dma              1931 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	dma_free_coherent(&il->pci_dev->dev, ptr->size, ptr->addr, ptr->dma);
dma              2002 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	il_wr(il, FH49_KW_MEM_ADDR_REG, il->kw.dma >> 4);
dma              2037 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	il_wr(il, FH49_KW_MEM_ADDR_REG, il->kw.dma >> 4);
dma              5187 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	il_wr_prph(il, IL49_SCD_DRAM_BASE_ADDR, il->scd_bc_tbls.dma >> 10);
dma               877 drivers/net/wireless/intel/iwlegacy/common.h 	dma_addr_t dma;
dma               245 drivers/net/wireless/intel/iwlwifi/pcie/internal.h 	dma_addr_t dma;
dma              1254 drivers/net/wireless/intel/iwlwifi/pcie/tx-gen2.c 	cmd.byte_cnt_addr = cpu_to_le64(txq->bc_tbl.dma);
dma               167 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 				       &ptr->dma, GFP_KERNEL);
dma               179 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 	dma_free_coherent(trans->dev, ptr->size, ptr->addr, ptr->dma);
dma               773 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 		       trans_pcie->scd_bc_tbls.dma >> 10);
dma               835 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 			   trans_pcie->kw.dma >> 4);
dma              1036 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 			   trans_pcie->kw.dma >> 4);
dma               150 drivers/net/wireless/marvell/mwl8k.c 		DEFINE_DMA_UNMAP_ADDR(dma);
dma              1229 drivers/net/wireless/marvell/mwl8k.c 		dma_unmap_addr_set(&rxq->buf[rx], dma, addr);
dma              1253 drivers/net/wireless/marvell/mwl8k.c 					 dma_unmap_addr(&rxq->buf[i], dma),
dma              1255 drivers/net/wireless/marvell/mwl8k.c 			dma_unmap_addr_set(&rxq->buf[i], dma, 0);
dma              1347 drivers/net/wireless/marvell/mwl8k.c 				 dma_unmap_addr(&rxq->buf[rxq->head], dma),
dma              1349 drivers/net/wireless/marvell/mwl8k.c 		dma_unmap_addr_set(&rxq->buf[rxq->head], dma, 0);
dma              1904 drivers/net/wireless/marvell/mwl8k.c 	dma_addr_t dma;
dma              2044 drivers/net/wireless/marvell/mwl8k.c 	dma = pci_map_single(priv->pdev, skb->data,
dma              2047 drivers/net/wireless/marvell/mwl8k.c 	if (pci_dma_mapping_error(priv->pdev, dma)) {
dma              2080 drivers/net/wireless/marvell/mwl8k.c 			pci_unmap_single(priv->pdev, dma, skb->len,
dma              2094 drivers/net/wireless/marvell/mwl8k.c 	tx->pkt_phys_addr = cpu_to_le32(dma);
dma                38 drivers/net/wireless/mediatek/mt7601u/mt7601u.h 	dma_addr_t dma;
dma                41 drivers/net/wireless/mediatek/mt7601u/usb.c 	buf->buf = usb_alloc_coherent(usb_dev, buf->len, GFP_KERNEL, &buf->dma);
dma                50 drivers/net/wireless/mediatek/mt7601u/usb.c 	usb_free_coherent(usb_dev, buf->len, buf->buf, buf->dma);
dma                69 drivers/net/wireless/mediatek/mt7601u/usb.c 	buf->urb->transfer_dma = buf->dma;
dma               105 drivers/net/wireless/ralink/rt2x00/rt2x00mmio.c 	dma_addr_t dma;
dma               112 drivers/net/wireless/ralink/rt2x00/rt2x00mmio.c 				  queue->limit * queue->desc_size, &dma,
dma               123 drivers/net/wireless/ralink/rt2x00/rt2x00mmio.c 		entry_priv->desc_dma = dma + i * queue->desc_size;
dma               862 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[1].dma);
dma               864 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[0].dma);
dma               867 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[4].dma);
dma               869 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[0].dma);
dma               871 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[1].dma);
dma               873 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[2].dma);
dma               875 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				  priv->tx_ring[3].dma);
dma              1073 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 	dma_addr_t dma;
dma              1077 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 				     &dma);
dma              1085 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 	priv->tx_ring[prio].dma = dma;
dma              1092 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 			cpu_to_le32((u32)dma + ((i + 1) % entries) * sizeof(*ring));
dma              1113 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 			    ring->desc, ring->dma);
dma                90 drivers/net/wireless/realtek/rtl818x/rtl8180/rtl8180.h 	dma_addr_t dma;
dma              1248 drivers/net/wireless/realtek/rtlwifi/pci.c 	rtlpci->tx_ring[prio].dma = desc_dma;
dma              1287 drivers/net/wireless/realtek/rtlwifi/pci.c 					  &rtlpci->rx_ring[rxring_idx].dma);
dma              1310 drivers/net/wireless/realtek/rtlwifi/pci.c 					  &rtlpci->rx_ring[rxring_idx].dma);
dma              1362 drivers/net/wireless/realtek/rtlwifi/pci.c 			    ring->desc, ring->dma);
dma              1395 drivers/net/wireless/realtek/rtlwifi/pci.c 				    rtlpci->rx_ring[rxring_idx].dma);
dma              1402 drivers/net/wireless/realtek/rtlwifi/pci.c 				    rtlpci->rx_ring[rxring_idx].dma);
dma               147 drivers/net/wireless/realtek/rtlwifi/pci.h 	dma_addr_t dma;
dma               160 drivers/net/wireless/realtek/rtlwifi/pci.h 	dma_addr_t dma;
dma               891 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			((u64) rtlpci->tx_ring[BEACON_QUEUE].dma) &
dma               894 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->tx_ring[MGNT_QUEUE].dma &
dma               897 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->tx_ring[VO_QUEUE].dma & DMA_BIT_MASK(32));
dma               899 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->tx_ring[VI_QUEUE].dma & DMA_BIT_MASK(32));
dma               901 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->tx_ring[BE_QUEUE].dma & DMA_BIT_MASK(32));
dma               903 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->tx_ring[BK_QUEUE].dma & DMA_BIT_MASK(32));
dma               905 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->tx_ring[HIGH_QUEUE].dma &
dma               908 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c 			(u64) rtlpci->rx_ring[RX_MPDU_QUEUE].dma &
dma               737 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			((u64) rtlpci->tx_ring[BEACON_QUEUE].dma) &
dma               740 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->tx_ring[MGNT_QUEUE].dma &
dma               743 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->tx_ring[VO_QUEUE].dma & DMA_BIT_MASK(32));
dma               745 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->tx_ring[VI_QUEUE].dma & DMA_BIT_MASK(32));
dma               747 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->tx_ring[BE_QUEUE].dma & DMA_BIT_MASK(32));
dma               749 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->tx_ring[BK_QUEUE].dma & DMA_BIT_MASK(32));
dma               751 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->tx_ring[HIGH_QUEUE].dma &
dma               754 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/hw.c 			(u64) rtlpci->rx_ring[RX_MPDU_QUEUE].dma &
dma               734 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 			rtlpci->tx_ring[BEACON_QUEUE].dma);
dma               735 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 	rtl_write_dword(rtlpriv, REG_MGQ_DESA, rtlpci->tx_ring[MGNT_QUEUE].dma);
dma               736 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 	rtl_write_dword(rtlpriv, REG_VOQ_DESA, rtlpci->tx_ring[VO_QUEUE].dma);
dma               737 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 	rtl_write_dword(rtlpriv, REG_VIQ_DESA, rtlpci->tx_ring[VI_QUEUE].dma);
dma               738 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 	rtl_write_dword(rtlpriv, REG_BEQ_DESA, rtlpci->tx_ring[BE_QUEUE].dma);
dma               739 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 	rtl_write_dword(rtlpriv, REG_BKQ_DESA, rtlpci->tx_ring[BK_QUEUE].dma);
dma               740 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 	rtl_write_dword(rtlpriv, REG_HQ_DESA, rtlpci->tx_ring[HIGH_QUEUE].dma);
dma               743 drivers/net/wireless/realtek/rtlwifi/rtl8192de/hw.c 			rtlpci->rx_ring[RX_MPDU_QUEUE].dma);
dma               842 drivers/net/wireless/realtek/rtlwifi/rtl8192ee/hw.c 			(u64)rtlpci->rx_ring[RX_MPDU_QUEUE].dma >> 32);
dma               874 drivers/net/wireless/realtek/rtlwifi/rtl8192ee/hw.c 			(u64)rtlpci->rx_ring[RX_MPDU_QUEUE].dma &
dma               595 drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c 	desc_dma_addr = rtlpci->tx_ring[queue_index].dma +
dma               695 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, RDQDA, rtlpci->rx_ring[RX_MPDU_QUEUE].dma);
dma               696 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, RCDA, rtlpci->rx_ring[RX_CMD_QUEUE].dma);
dma               699 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TBKDA, rtlpci->tx_ring[BK_QUEUE].dma);
dma               700 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TBEDA, rtlpci->tx_ring[BE_QUEUE].dma);
dma               701 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TVIDA, rtlpci->tx_ring[VI_QUEUE].dma);
dma               702 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TVODA, rtlpci->tx_ring[VO_QUEUE].dma);
dma               703 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TBDA, rtlpci->tx_ring[BEACON_QUEUE].dma);
dma               704 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TCDA, rtlpci->tx_ring[TXCMD_QUEUE].dma);
dma               705 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, TMDA, rtlpci->tx_ring[MGNT_QUEUE].dma);
dma               706 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, THPDA, rtlpci->tx_ring[HIGH_QUEUE].dma);
dma               707 drivers/net/wireless/realtek/rtlwifi/rtl8192se/hw.c 	rtl_write_dword(rtlpriv, HDA, rtlpci->tx_ring[HCCA_QUEUE].dma);
dma               737 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			((u64) rtlpci->tx_ring[BEACON_QUEUE].dma) &
dma               740 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->tx_ring[MGNT_QUEUE].dma &
dma               743 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->tx_ring[VO_QUEUE].dma & DMA_BIT_MASK(32));
dma               745 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->tx_ring[VI_QUEUE].dma & DMA_BIT_MASK(32));
dma               747 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->tx_ring[BE_QUEUE].dma & DMA_BIT_MASK(32));
dma               749 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->tx_ring[BK_QUEUE].dma & DMA_BIT_MASK(32));
dma               751 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->tx_ring[HIGH_QUEUE].dma &
dma               754 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/hw.c 			(u64) rtlpci->rx_ring[RX_MPDU_QUEUE].dma &
dma               884 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			((u64) rtlpci->tx_ring[BEACON_QUEUE].dma) &
dma               887 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->tx_ring[MGNT_QUEUE].dma &
dma               890 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->tx_ring[VO_QUEUE].dma & DMA_BIT_MASK(32));
dma               892 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->tx_ring[VI_QUEUE].dma & DMA_BIT_MASK(32));
dma               894 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->tx_ring[BE_QUEUE].dma & DMA_BIT_MASK(32));
dma               896 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->tx_ring[BK_QUEUE].dma & DMA_BIT_MASK(32));
dma               898 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->tx_ring[HIGH_QUEUE].dma &
dma               901 drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c 			(u64) rtlpci->rx_ring[RX_MPDU_QUEUE].dma &
dma               977 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[BEACON_QUEUE].dma & DMA_BIT_MASK(32));
dma               979 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[MGNT_QUEUE].dma & DMA_BIT_MASK(32));
dma               981 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[VO_QUEUE].dma & DMA_BIT_MASK(32));
dma               983 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[VI_QUEUE].dma & DMA_BIT_MASK(32));
dma               985 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[BE_QUEUE].dma & DMA_BIT_MASK(32));
dma               987 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[BK_QUEUE].dma & DMA_BIT_MASK(32));
dma               989 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[HIGH_QUEUE].dma & DMA_BIT_MASK(32));
dma               991 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->rx_ring[RX_MPDU_QUEUE].dma & DMA_BIT_MASK(32));
dma              1427 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[BEACON_QUEUE].dma & DMA_BIT_MASK(32));
dma              1429 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[MGNT_QUEUE].dma & DMA_BIT_MASK(32));
dma              1431 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[VO_QUEUE].dma & DMA_BIT_MASK(32));
dma              1433 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[VI_QUEUE].dma & DMA_BIT_MASK(32));
dma              1435 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[BE_QUEUE].dma & DMA_BIT_MASK(32));
dma              1437 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[BK_QUEUE].dma & DMA_BIT_MASK(32));
dma              1439 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->tx_ring[HIGH_QUEUE].dma & DMA_BIT_MASK(32));
dma              1441 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c 			rtlpci->rx_ring[RX_MPDU_QUEUE].dma & DMA_BIT_MASK(32));
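
Note: the rtlwifi hw.c entries above all program a descriptor ring's DMA base address into chip registers, keeping only the low 32 bits (rtl8192ee additionally writes the upper 32 bits to a separate register). A minimal sketch of that pattern follows; the register offsets, the writel()-based accessor and the example function are illustrative assumptions, not the rtlwifi register map:

#include <linux/kernel.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>

/* Hypothetical register offsets, stand-ins for the chip-specific map. */
#define EX_REG_RX_DESA_LOW	0x100
#define EX_REG_RX_DESA_HIGH	0x104

static void example_program_rx_ring(void __iomem *regs, dma_addr_t ring_dma)
{
	/* Low 32 bits of the RX descriptor ring base address. */
	writel(ring_dma & DMA_BIT_MASK(32), regs + EX_REG_RX_DESA_LOW);
	/* Upper 32 bits, only meaningful when 64-bit DMA is in use. */
	writel(upper_32_bits((u64)ring_dma), regs + EX_REG_RX_DESA_HIGH);
}
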
dma                99 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               105 drivers/net/wireless/realtek/rtw88/pci.c 		dma = tx_data->dma;
dma               107 drivers/net/wireless/realtek/rtw88/pci.c 		pci_unmap_single(pdev, dma, skb->len, PCI_DMA_TODEVICE);
dma               123 drivers/net/wireless/realtek/rtw88/pci.c 	pci_free_consistent(pdev, ring_sz, head, tx_ring->r.dma);
dma               133 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               141 drivers/net/wireless/realtek/rtw88/pci.c 		dma = *((dma_addr_t *)skb->cb);
dma               142 drivers/net/wireless/realtek/rtw88/pci.c 		pci_unmap_single(pdev, dma, buf_sz, PCI_DMA_FROMDEVICE);
dma               157 drivers/net/wireless/realtek/rtw88/pci.c 	pci_free_consistent(pdev, ring_sz, head, rx_ring->r.dma);
dma               184 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               187 drivers/net/wireless/realtek/rtw88/pci.c 	head = pci_zalloc_consistent(pdev, ring_sz, &dma);
dma               195 drivers/net/wireless/realtek/rtw88/pci.c 	tx_ring->r.dma = dma;
dma               211 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               216 drivers/net/wireless/realtek/rtw88/pci.c 	dma = pci_map_single(pdev, skb->data, buf_sz, PCI_DMA_FROMDEVICE);
dma               217 drivers/net/wireless/realtek/rtw88/pci.c 	if (pci_dma_mapping_error(pdev, dma))
dma               220 drivers/net/wireless/realtek/rtw88/pci.c 	*((dma_addr_t *)skb->cb) = dma;
dma               225 drivers/net/wireless/realtek/rtw88/pci.c 	buf_desc->dma = cpu_to_le32(dma);
dma               230 drivers/net/wireless/realtek/rtw88/pci.c static void rtw_pci_sync_rx_desc_device(struct rtw_dev *rtwdev, dma_addr_t dma,
dma               238 drivers/net/wireless/realtek/rtw88/pci.c 	dma_sync_single_for_device(dev, dma, buf_sz, DMA_FROM_DEVICE);
dma               244 drivers/net/wireless/realtek/rtw88/pci.c 	buf_desc->dma = cpu_to_le32(dma);
dma               253 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               260 drivers/net/wireless/realtek/rtw88/pci.c 	head = pci_zalloc_consistent(pdev, ring_sz, &dma);
dma               285 drivers/net/wireless/realtek/rtw88/pci.c 	rx_ring->r.dma = dma;
dma               298 drivers/net/wireless/realtek/rtw88/pci.c 		dma = *((dma_addr_t *)skb->cb);
dma               299 drivers/net/wireless/realtek/rtw88/pci.c 		pci_unmap_single(pdev, dma, buf_sz, PCI_DMA_FROMDEVICE);
dma               303 drivers/net/wireless/realtek/rtw88/pci.c 	pci_free_consistent(pdev, ring_sz, head, dma);
dma               393 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               398 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_BCN].r.dma;
dma               399 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_BCNQ, dma);
dma               402 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_H2C].r.dma;
dma               406 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_H2CQ, dma);
dma               409 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_BK].r.dma;
dma               413 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_BKQ, dma);
dma               416 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_BE].r.dma;
dma               420 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_BEQ, dma);
dma               423 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_VO].r.dma;
dma               427 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_VOQ, dma);
dma               430 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_VI].r.dma;
dma               434 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_VIQ, dma);
dma               437 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_MGMT].r.dma;
dma               441 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_MGMTQ, dma);
dma               444 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->tx_rings[RTW_TX_QUEUE_HI0].r.dma;
dma               448 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_TXBD_DESA_HI0Q, dma);
dma               451 drivers/net/wireless/realtek/rtw88/pci.c 	dma = rtwpci->rx_rings[RTW_RX_QUEUE_MPDU].r.dma;
dma               455 drivers/net/wireless/realtek/rtw88/pci.c 	rtw_write32(rtwdev, RTK_PCI_RXBD_DESA_MPDUQ, dma);
dma               570 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               576 drivers/net/wireless/realtek/rtw88/pci.c 	dma = tx_data->dma;
dma               577 drivers/net/wireless/realtek/rtw88/pci.c 	pci_unmap_single(rtwpci->pdev, dma, prev->len,
dma               611 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               633 drivers/net/wireless/realtek/rtw88/pci.c 	dma = pci_map_single(rtwpci->pdev, skb->data, skb->len,
dma               635 drivers/net/wireless/realtek/rtw88/pci.c 	if (pci_dma_mapping_error(rtwpci->pdev, dma))
dma               647 drivers/net/wireless/realtek/rtw88/pci.c 	buf_desc[0].dma = cpu_to_le32(dma);
dma               649 drivers/net/wireless/realtek/rtw88/pci.c 	buf_desc[1].dma = cpu_to_le32(dma + tx_pkt_desc_sz);
dma               652 drivers/net/wireless/realtek/rtw88/pci.c 	tx_data->dma = dma;
dma               771 drivers/net/wireless/realtek/rtw88/pci.c 		pci_unmap_single(rtwpci->pdev, tx_data->dma, skb->len,
dma               825 drivers/net/wireless/realtek/rtw88/pci.c 	dma_addr_t dma;
dma               841 drivers/net/wireless/realtek/rtw88/pci.c 		dma = *((dma_addr_t *)skb->cb);
dma               842 drivers/net/wireless/realtek/rtw88/pci.c 		dma_sync_single_for_cpu(rtwdev->dev, dma, RTK_PCI_RX_BUF_SIZE,
dma               875 drivers/net/wireless/realtek/rtw88/pci.c 		rtw_pci_sync_rx_desc_device(rtwdev, dma, ring, cur_rp,
dma               144 drivers/net/wireless/realtek/rtw88/pci.h 	__le32 dma;
dma               148 drivers/net/wireless/realtek/rtw88/pci.h 	dma_addr_t dma;
dma               154 drivers/net/wireless/realtek/rtw88/pci.h 	dma_addr_t dma;
dma               172 drivers/net/wireless/realtek/rtw88/pci.h 	__le32 dma;
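
Note: the rtw88/pci.c entries above allocate coherent memory for the buffer-descriptor rings and create streaming mappings for each skb, stashing the handle in skb->cb; at this point the driver still uses the legacy pci_*_consistent/pci_map_single wrappers. A minimal sketch of the same pattern using the generic DMA API; the ex_rx_bd layout and function names are hypothetical:

#include <linux/errno.h>
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Hypothetical RX buffer descriptor; the real layout is in rtw88/pci.h. */
struct ex_rx_bd {
	__le32 dma;
};

static int example_alloc_rx_ring(struct device *dev, size_t ring_sz,
				 void **head, dma_addr_t *ring_dma)
{
	/* Coherent memory for the buffer-descriptor ring itself. */
	*head = dma_alloc_coherent(dev, ring_sz, ring_dma, GFP_KERNEL);
	return *head ? 0 : -ENOMEM;
}

static int example_map_rx_skb(struct device *dev, struct sk_buff *skb,
			      size_t buf_sz, struct ex_rx_bd *bd)
{
	dma_addr_t dma;

	/* Streaming mapping for the packet buffer; keep the handle in
	 * skb->cb so it can be unmapped on completion. */
	dma = dma_map_single(dev, skb->data, buf_sz, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, dma))
		return -ENOMEM;

	*((dma_addr_t *)skb->cb) = dma;
	bd->dma = cpu_to_le32(dma);
	return 0;
}
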
dma              1065 drivers/net/wireless/ti/wl1251/acx.h 	struct acx_dma_statistics dma;
dma               112 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(dma, rx_requested, 20, "%u");
dma               113 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(dma, rx_errors, 20, "%u");
dma               114 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(dma, tx_requested, 20, "%u");
dma               115 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(dma, tx_errors, 20, "%u");
dma               252 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_DEL(dma, rx_requested);
dma               253 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_DEL(dma, rx_errors);
dma               254 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_DEL(dma, tx_requested);
dma               255 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_DEL(dma, tx_errors);
dma               351 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, rx_requested);
dma               352 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, rx_errors);
dma               353 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, tx_requested);
dma               354 drivers/net/wireless/ti/wl1251/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, tx_errors);
dma               246 drivers/net/wireless/ti/wl12xx/acx.h 	struct wl12xx_acx_dma_statistics dma;
dma                30 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(dma, rx_requested, "%u");
dma                31 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(dma, rx_errors, "%u");
dma                32 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(dma, tx_requested, "%u");
dma                33 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(dma, tx_errors, "%u");
dma               130 drivers/net/wireless/ti/wl12xx/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, rx_requested);
dma               131 drivers/net/wireless/ti/wl12xx/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, rx_errors);
dma               132 drivers/net/wireless/ti/wl12xx/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, tx_requested);
dma               133 drivers/net/wireless/ti/wl12xx/debugfs.c 	DEBUGFS_FWSTATS_ADD(dma, tx_errors);
dma                48 drivers/nvme/host/rdma.c 	u64			dma;
dma               173 drivers/nvme/host/rdma.c 	ib_dma_unmap_single(ibdev, qe->dma, capsule_size, dir);
dma               184 drivers/nvme/host/rdma.c 	qe->dma = ib_dma_map_single(ibdev, qe->data, capsule_size, dir);
dma               185 drivers/nvme/host/rdma.c 	if (ib_dma_mapping_error(ibdev, qe->dma)) {
dma              1349 drivers/nvme/host/rdma.c 	sge->addr   = qe->dma;
dma              1380 drivers/nvme/host/rdma.c 	list.addr   = qe->dma;
dma              1424 drivers/nvme/host/rdma.c 	ib_dma_sync_single_for_cpu(dev, sqe->dma, sizeof(*cmd), DMA_TO_DEVICE);
dma              1434 drivers/nvme/host/rdma.c 	ib_dma_sync_single_for_device(dev, sqe->dma, sizeof(*cmd),
dma              1499 drivers/nvme/host/rdma.c 	ib_dma_sync_single_for_cpu(ibdev, qe->dma, len, DMA_FROM_DEVICE);
dma              1512 drivers/nvme/host/rdma.c 	ib_dma_sync_single_for_device(ibdev, qe->dma, len, DMA_FROM_DEVICE);
dma              1753 drivers/nvme/host/rdma.c 	req->sqe.dma = ib_dma_map_single(dev, req->sqe.data,
dma              1756 drivers/nvme/host/rdma.c 	err = ib_dma_mapping_error(dev, req->sqe.dma);
dma              1760 drivers/nvme/host/rdma.c 	ib_dma_sync_single_for_cpu(dev, sqe->dma,
dma              1779 drivers/nvme/host/rdma.c 	ib_dma_sync_single_for_device(dev, sqe->dma,
dma              1797 drivers/nvme/host/rdma.c 	ib_dma_unmap_single(dev, req->sqe.dma, sizeof(struct nvme_command),
dma              1816 drivers/nvme/host/rdma.c 	ib_dma_unmap_single(ibdev, req->sqe.dma, sizeof(struct nvme_command),
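
Note: the nvme/host/rdma.c entries map each queue entry once with ib_dma_map_single() and then pass ownership back and forth with ib_dma_sync_single_for_cpu()/_for_device() around every use. A minimal sketch of that lifecycle; struct ex_qe and the function names are hypothetical stand-ins for the driver's qe handling:

#include <linux/errno.h>
#include <rdma/ib_verbs.h>

/* Hypothetical queue entry mirroring the va/dma pairing above. */
struct ex_qe {
	void	*data;
	u64	dma;
};

static int example_map_qe(struct ib_device *ibdev, struct ex_qe *qe,
			  size_t len, enum dma_data_direction dir)
{
	qe->dma = ib_dma_map_single(ibdev, qe->data, len, dir);
	return ib_dma_mapping_error(ibdev, qe->dma) ? -ENOMEM : 0;
}

static void example_post_qe(struct ib_device *ibdev, struct ex_qe *qe,
			    size_t len)
{
	/* Give the buffer back to the CPU before filling the command. */
	ib_dma_sync_single_for_cpu(ibdev, qe->dma, len, DMA_TO_DEVICE);
	/* ... build the command in qe->data here ... */
	/* Hand ownership back to the device before posting the WR;
	 * the SGE then carries qe->dma (sge->addr = qe->dma). */
	ib_dma_sync_single_for_device(ibdev, qe->dma, len, DMA_TO_DEVICE);
}
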
dma                78 drivers/parport/daisy.c 						       real->dma,
dma               232 drivers/parport/parport_gsc.c 				       int dma, struct parisc_device *padev)
dma               259 drivers/parport/parport_gsc.c 	p->dma = dma;
dma               293 drivers/parport/parport_gsc.c 		if (p->dma == PARPORT_DMA_AUTO) {
dma               294 drivers/parport/parport_gsc.c 			p->dma = PARPORT_DMA_NONE;
dma               297 drivers/parport/parport_gsc.c 	if (p->dma == PARPORT_DMA_AUTO) /* To use DMA, giving the irq
dma               299 drivers/parport/parport_gsc.c 		p->dma = PARPORT_DMA_NONE;
dma               322 drivers/parport/parport_gsc.c 			p->dma = PARPORT_DMA_NONE;
dma               386 drivers/parport/parport_gsc.c 		if (p->dma != PARPORT_DMA_NONE)
dma               387 drivers/parport/parport_gsc.c 			free_dma(p->dma);
dma               205 drivers/parport/parport_gsc.h 						int irq, int dma,
dma              2098 drivers/parport/parport_ip32.c 			p->dma = 0; /* arbitrary value != PARPORT_DMA_NONE */
dma               101 drivers/parport/parport_pc.c 	int dma;
dma               620 drivers/parport/parport_pc.c 		disable_dma(port->dma);
dma               621 drivers/parport/parport_pc.c 		clear_dma_ff(port->dma);
dma               622 drivers/parport/parport_pc.c 		set_dma_mode(port->dma, DMA_MODE_WRITE);
dma               623 drivers/parport/parport_pc.c 		set_dma_addr(port->dma, dma_addr);
dma               624 drivers/parport/parport_pc.c 		set_dma_count(port->dma, count);
dma               632 drivers/parport/parport_pc.c 		enable_dma(port->dma);
dma               660 drivers/parport/parport_pc.c 		disable_dma(port->dma);
dma               661 drivers/parport/parport_pc.c 		clear_dma_ff(port->dma);
dma               662 drivers/parport/parport_pc.c 		count = get_dma_residue(port->dma);
dma               682 drivers/parport/parport_pc.c 	disable_dma(port->dma);
dma               683 drivers/parport/parport_pc.c 	clear_dma_ff(port->dma);
dma               684 drivers/parport/parport_pc.c 	left += get_dma_residue(port->dma);
dma               702 drivers/parport/parport_pc.c 	if (port->dma != PARPORT_DMA_NONE)
dma              1034 drivers/parport/parport_pc.c 				s->dma = d;
dma              1036 drivers/parport/parport_pc.c 				s->dma = PARPORT_DMA_NONE;
dma              1104 drivers/parport/parport_pc.c 			s->dma = (((cr74 & 0x07) > 3) ?
dma              1390 drivers/parport/parport_pc.c 		return s->dma;
dma              1990 drivers/parport/parport_pc.c 	int dma;
dma              1994 drivers/parport/parport_pc.c 	dma = inb(CONFIGB(p)) & 0x07;
dma              1997 drivers/parport/parport_pc.c 	if ((dma & 0x03) == 0)
dma              1998 drivers/parport/parport_pc.c 		dma = PARPORT_DMA_NONE;
dma              2001 drivers/parport/parport_pc.c 	return dma;
dma              2008 drivers/parport/parport_pc.c 		p->dma = programmable_dma_support(p);
dma              2009 drivers/parport/parport_pc.c 	if (p->dma == PARPORT_DMA_NONE) {
dma              2013 drivers/parport/parport_pc.c 		p->dma = get_superio_dma(p);
dma              2016 drivers/parport/parport_pc.c 	return p->dma;
dma              2026 drivers/parport/parport_pc.c 				      int irq, int dma,
dma              2052 drivers/parport/parport_pc.c 			dma = PARPORT_DMA_NONE;
dma              2065 drivers/parport/parport_pc.c 	p = parport_register_port(base, irq, dma, ops);
dma              2126 drivers/parport/parport_pc.c 		if (p->dma == PARPORT_DMA_AUTO) {
dma              2127 drivers/parport/parport_pc.c 			p->dma = PARPORT_DMA_NONE;
dma              2131 drivers/parport/parport_pc.c 	if (p->dma == PARPORT_DMA_AUTO) /* To use DMA, giving the irq
dma              2133 drivers/parport/parport_pc.c 		p->dma = PARPORT_DMA_NONE;
dma              2137 drivers/parport/parport_pc.c 	    p->dma != PARPORT_DMA_NOFIFO &&
dma              2146 drivers/parport/parport_pc.c 		if (p->dma != PARPORT_DMA_NONE) {
dma              2147 drivers/parport/parport_pc.c 			printk(KERN_CONT ", dma %d", p->dma);
dma              2153 drivers/parport/parport_pc.c 		p->dma = PARPORT_DMA_NONE;
dma              2200 drivers/parport/parport_pc.c 			p->dma = PARPORT_DMA_NONE;
dma              2205 drivers/parport/parport_pc.c 		if (p->dma != PARPORT_DMA_NONE) {
dma              2206 drivers/parport/parport_pc.c 			if (request_dma(p->dma, p->name)) {
dma              2209 drivers/parport/parport_pc.c 					p->name, p->dma);
dma              2210 drivers/parport/parport_pc.c 				p->dma = PARPORT_DMA_NONE;
dma              2222 drivers/parport/parport_pc.c 					free_dma(p->dma);
dma              2223 drivers/parport/parport_pc.c 					p->dma = PARPORT_DMA_NONE;
dma              2281 drivers/parport/parport_pc.c 	if (p->dma != PARPORT_DMA_NONE)
dma              2282 drivers/parport/parport_pc.c 		free_dma(p->dma);
dma              2440 drivers/parport/parport_pc.c 	int dma, irq;
dma              2539 drivers/parport/parport_pc.c 		dma = ((tmp & VIA_DMACONTROL_PARALLEL) >> 2);
dma              2543 drivers/parport/parport_pc.c 		dma = PARPORT_DMA_NONE;
dma              2548 drivers/parport/parport_pc.c 		dma = PARPORT_DMA_NONE;
dma              2551 drivers/parport/parport_pc.c 		dma = PARPORT_DMA_NONE;
dma              2581 drivers/parport/parport_pc.c 	if (parport_pc_probe_port(port1, port2, irq, dma, &pdev->dev, 0)) {
dma              2586 drivers/parport/parport_pc.c 		if (dma != PARPORT_DMA_NONE)
dma              2587 drivers/parport/parport_pc.c 			pr_cont(", dma=%d", dma);
dma              2593 drivers/parport/parport_pc.c 		port1, irq, dma);
dma              2954 drivers/parport/parport_pc.c 	int dma, irq;
dma              2976 drivers/parport/parport_pc.c 		dma = pnp_dma(dev, 0);
dma              2978 drivers/parport/parport_pc.c 		dma = PARPORT_DMA_NONE;
dma              2981 drivers/parport/parport_pc.c 	pdata = parport_pc_probe_port(io_lo, io_hi, irq, dma, &dev->dev, 0);
dma              3155 drivers/parport/parport_pc.c static char *dma[PARPORT_PC_MAX_PORTS];
dma              3163 drivers/parport/parport_pc.c MODULE_PARM_DESC(dma, "DMA channel");
dma              3164 drivers/parport/parport_pc.c module_param_hw_array(dma, charp, dma, NULL, 0);
dma              3191 drivers/parport/parport_pc.c 		if (parport_parse_dma(dma[i], &val))
dma              3210 drivers/parport/parport_pc.c 		if (dma[0] && !parport_parse_dma(dma[0], &val))
dma               273 drivers/parport/parport_sunbpp.c 	int irq, dma, err = 0, size;
dma               286 drivers/parport/parport_sunbpp.c 	dma = PARPORT_DMA_NONE;
dma               296 drivers/parport/parport_sunbpp.c 	if (!(p = parport_register_port((unsigned long)base, irq, dma, ops))) {
dma               187 drivers/parport/procfs.c 	len += sprintf (buffer, "%d\n", port->dma);
dma               457 drivers/parport/share.c struct parport *parport_register_port(unsigned long base, int irq, int dma,
dma               474 drivers/parport/share.c 	tmp->dma = dma;
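
Note: the parport_pc.c entries show the classic ISA DMA programming sequence (disable, clear flip-flop, set mode/address/count, enable) bracketed by request_dma()/free_dma(). A minimal sketch of that sequence, assuming the caller already holds the ISA DMA lock and that the buffer is an ISA-reachable bus address; the function itself is hypothetical:

#include <linux/types.h>
#include <linux/errno.h>
#include <asm/dma.h>

static int example_isa_dma_write(unsigned int chan, const char *name,
				 dma_addr_t buf, size_t count)
{
	if (request_dma(chan, name))
		return -EBUSY;

	disable_dma(chan);		/* mask the channel while programming */
	clear_dma_ff(chan);		/* reset the byte-pointer flip-flop */
	set_dma_mode(chan, DMA_MODE_WRITE);	/* memory -> device */
	set_dma_addr(chan, buf);	/* must fit the ISA 24-bit window */
	set_dma_count(chan, count);
	enable_dma(chan);

	/* ... wait for the transfer to complete, then tear down: */
	disable_dma(chan);
	free_dma(chan);
	return 0;
}
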
dma                71 drivers/platform/mellanox/mlxbf-tmfifo.c 	dma_addr_t dma;
dma               219 drivers/platform/mellanox/mlxbf-tmfifo.c 					  vring->va, vring->dma);
dma               235 drivers/platform/mellanox/mlxbf-tmfifo.c 	dma_addr_t dma;
dma               249 drivers/platform/mellanox/mlxbf-tmfifo.c 		va = dma_alloc_coherent(dev->parent, size, &dma, GFP_KERNEL);
dma               257 drivers/platform/mellanox/mlxbf-tmfifo.c 		vring->dma = dma;
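
Note: the mlxbf-tmfifo entries pair dma_alloc_coherent() with dma_free_coherent() for each vring, keeping both the virtual address and the dma handle for the whole lifetime of the ring. A minimal sketch of that pairing, with hypothetical wrapper names:

#include <linux/dma-mapping.h>

static void *example_vring_alloc(struct device *dev, size_t size,
				 dma_addr_t *dma)
{
	/* The returned dma handle must be kept and passed back on free. */
	return dma_alloc_coherent(dev, size, dma, GFP_KERNEL);
}

static void example_vring_free(struct device *dev, size_t size,
			       void *va, dma_addr_t dma)
{
	dma_free_coherent(dev, size, va, dma);
}
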
dma                76 drivers/pnp/base.h 		struct pnp_dma dma;
dma               167 drivers/pnp/base.h struct pnp_resource *pnp_add_dma_resource(struct pnp_dev *dev, int dma,
dma               101 drivers/pnp/interface.c 			  struct pnp_dma *dma)
dma               108 drivers/pnp/interface.c 		if (dma->map & (1 << i)) {
dma               116 drivers/pnp/interface.c 	if (!dma->map)
dma               118 drivers/pnp/interface.c 	switch (dma->flags & IORESOURCE_DMA_TYPE_MASK) {
dma               129 drivers/pnp/interface.c 	if (dma->flags & IORESOURCE_DMA_MASTER)
dma               131 drivers/pnp/interface.c 	if (dma->flags & IORESOURCE_DMA_BYTE)
dma               133 drivers/pnp/interface.c 	if (dma->flags & IORESOURCE_DMA_WORD)
dma               135 drivers/pnp/interface.c 	switch (dma->flags & IORESOURCE_DMA_SPEED_MASK) {
dma               202 drivers/pnp/interface.c 		pnp_print_dma(buffer, space, &option->u.dma);
dma               284 drivers/pnp/manager.c 			ret = pnp_assign_dma(dev, &option->u.dma, ndma++);
dma               168 drivers/pnp/pnpacpi/rsparser.c 	struct acpi_resource_dma *dma;
dma               233 drivers/pnp/pnpacpi/rsparser.c 		dma = &res->data.dma;
dma               234 drivers/pnp/pnpacpi/rsparser.c 		if (dma->channel_count > 0 && dma->channels[0] != (u8) -1)
dma               235 drivers/pnp/pnpacpi/rsparser.c 			flags = dma_flags(dev, dma->type, dma->bus_master,
dma               236 drivers/pnp/pnpacpi/rsparser.c 					  dma->transfer);
dma               239 drivers/pnp/pnpacpi/rsparser.c 		pnp_add_dma_resource(dev, dma->channels[0], flags);
dma               467 drivers/pnp/pnpacpi/rsparser.c 		pnpacpi_parse_dma_option(dev, option_flags, &res->data.dma);
dma               719 drivers/pnp/pnpacpi/rsparser.c 	struct acpi_resource_dma *dma = &resource->data.dma;
dma               722 drivers/pnp/pnpacpi/rsparser.c 		dma->channel_count = 0;
dma               731 drivers/pnp/pnpacpi/rsparser.c 		dma->type = ACPI_TYPE_A;
dma               734 drivers/pnp/pnpacpi/rsparser.c 		dma->type = ACPI_TYPE_B;
dma               737 drivers/pnp/pnpacpi/rsparser.c 		dma->type = ACPI_TYPE_F;
dma               740 drivers/pnp/pnpacpi/rsparser.c 		dma->type = ACPI_COMPATIBILITY;
dma               745 drivers/pnp/pnpacpi/rsparser.c 		dma->transfer = ACPI_TRANSFER_8;
dma               748 drivers/pnp/pnpacpi/rsparser.c 		dma->transfer = ACPI_TRANSFER_8_16;
dma               751 drivers/pnp/pnpacpi/rsparser.c 		dma->transfer = ACPI_TRANSFER_16;
dma               754 drivers/pnp/pnpacpi/rsparser.c 	dma->bus_master = !!(p->flags & IORESOURCE_DMA_MASTER);
dma               755 drivers/pnp/pnpacpi/rsparser.c 	dma->channel_count = 1;
dma               756 drivers/pnp/pnpacpi/rsparser.c 	dma->channels[0] = p->start;
dma               760 drivers/pnp/pnpacpi/rsparser.c 		(int) p->start, dma->type, dma->transfer, dma->bus_master);
dma               882 drivers/pnp/pnpacpi/rsparser.c 	unsigned int port = 0, irq = 0, dma = 0, mem = 0;
dma               895 drivers/pnp/pnpacpi/rsparser.c 				pnp_get_resource(dev, IORESOURCE_DMA, dma));
dma               896 drivers/pnp/pnpacpi/rsparser.c 			dma++;
dma               664 drivers/pnp/pnpbios/rsparser.c 	int port = 0, irq = 0, dma = 0, mem = 0;
dma               718 drivers/pnp/pnpbios/rsparser.c 				pnp_get_resource(dev, IORESOURCE_DMA, dma));
dma               719 drivers/pnp/pnpbios/rsparser.c 			dma++;
dma                73 drivers/pnp/quirks.c 	struct pnp_dma *dma;
dma                89 drivers/pnp/quirks.c 			dma = &option->u.dma;
dma                90 drivers/pnp/quirks.c 			if ((dma->flags & IORESOURCE_DMA_TYPE_MASK) ==
dma                92 drivers/pnp/quirks.c 			    dma->map != 0x0A) {
dma                96 drivers/pnp/quirks.c 					 pnp_option_set(option), dma->map);
dma                97 drivers/pnp/quirks.c 				dma->map = 0x0A;
dma                83 drivers/pnp/resource.c 	struct pnp_dma *dma;
dma                89 drivers/pnp/resource.c 	dma = &option->u.dma;
dma                90 drivers/pnp/resource.c 	dma->map = map;
dma                91 drivers/pnp/resource.c 	dma->flags = flags;
dma               421 drivers/pnp/resource.c 	resource_size_t *dma;
dma               423 drivers/pnp/resource.c 	dma = &res->start;
dma               430 drivers/pnp/resource.c 	if (*dma == 4 || *dma > 7)
dma               435 drivers/pnp/resource.c 		if (pnp_reserve_dma[i] == *dma)
dma               442 drivers/pnp/resource.c 			if (tres->start == *dma)
dma               450 drivers/pnp/resource.c 		if (request_dma(*dma, "pnp"))
dma               452 drivers/pnp/resource.c 		free_dma(*dma);
dma               465 drivers/pnp/resource.c 				if (tres->start == *dma)
dma               547 drivers/pnp/resource.c struct pnp_resource *pnp_add_dma_resource(struct pnp_dev *dev, int dma,
dma               555 drivers/pnp/resource.c 		dev_err(&dev->dev, "can't add resource for DMA %d\n", dma);
dma               561 drivers/pnp/resource.c 	res->start = dma;
dma               562 drivers/pnp/resource.c 	res->end = dma;
dma               651 drivers/pnp/resource.c 	struct pnp_dma *dma;
dma               675 drivers/pnp/resource.c 			dma = &option->u.dma;
dma               676 drivers/pnp/resource.c 			if (dma->map & (1 << start))
dma               112 drivers/pnp/support.c 	struct pnp_dma *dma;
dma               162 drivers/pnp/support.c 		dma = &option->u.dma;
dma               164 drivers/pnp/support.c 		if (!dma->map)
dma               169 drivers/pnp/support.c 				if (dma->map & (1 << i))
dma               175 drivers/pnp/support.c 				 "flags %#x", dma->map, dma->flags);
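
Note: in the pnp entries above, dma->map is a bitmask of allowed ISA channels (bit n set means channel n may be assigned; the quirk forces 0x0A, i.e. channels 1 and 3). A trivial hypothetical helper illustrating how such a map is interpreted:

/* Return the lowest ISA DMA channel permitted by a pnp_dma option map
 * (bit n set => channel n allowed), or -1 if the map is empty. */
static int example_first_allowed_dma(unsigned char map)
{
	int chan;

	for (chan = 0; chan < 8; chan++)
		if (map & (1 << chan))
			return chan;
	return -1;
}
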
dma                54 drivers/rapidio/devices/tsi721_dma.c 	return container_of(ddev, struct rio_mport, dma)->priv;
dma               969 drivers/rapidio/devices/tsi721_dma.c 	INIT_LIST_HEAD(&mport->dma.channels);
dma               979 drivers/rapidio/devices/tsi721_dma.c 		bdma_chan->dchan.device = &mport->dma;
dma               994 drivers/rapidio/devices/tsi721_dma.c 			      &mport->dma.channels);
dma               998 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.chancnt = nr_channels;
dma               999 drivers/rapidio/devices/tsi721_dma.c 	dma_cap_zero(mport->dma.cap_mask);
dma              1000 drivers/rapidio/devices/tsi721_dma.c 	dma_cap_set(DMA_PRIVATE, mport->dma.cap_mask);
dma              1001 drivers/rapidio/devices/tsi721_dma.c 	dma_cap_set(DMA_SLAVE, mport->dma.cap_mask);
dma              1003 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.dev = &priv->pdev->dev;
dma              1004 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.device_alloc_chan_resources = tsi721_alloc_chan_resources;
dma              1005 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.device_free_chan_resources = tsi721_free_chan_resources;
dma              1006 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.device_tx_status = tsi721_tx_status;
dma              1007 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.device_issue_pending = tsi721_issue_pending;
dma              1008 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.device_prep_slave_sg = tsi721_prep_rio_sg;
dma              1009 drivers/rapidio/devices/tsi721_dma.c 	mport->dma.device_terminate_all = tsi721_terminate_all;
dma              1011 drivers/rapidio/devices/tsi721_dma.c 	err = dma_async_device_register(&mport->dma);
dma              1025 drivers/rapidio/devices/tsi721_dma.c 	dma_async_device_unregister(&mport->dma);
dma              1027 drivers/rapidio/devices/tsi721_dma.c 	list_for_each_entry_safe(chan, _c, &mport->dma.channels,
dma              1822 drivers/rapidio/rio.c 	return mport == container_of(chan->device, struct rio_mport, dma);
dma               743 drivers/remoteproc/remoteproc_core.c 	dma_addr_t dma;
dma               747 drivers/remoteproc/remoteproc_core.c 	va = dma_alloc_coherent(dev->parent, mem->len, &dma, GFP_KERNEL);
dma               755 drivers/remoteproc/remoteproc_core.c 		va, &dma, mem->len);
dma               764 drivers/remoteproc/remoteproc_core.c 		if (mem->da != (u32)dma)
dma               793 drivers/remoteproc/remoteproc_core.c 		ret = iommu_map(rproc->domain, mem->da, dma, mem->len,
dma               812 drivers/remoteproc/remoteproc_core.c 			mem->da, &dma);
dma               817 drivers/remoteproc/remoteproc_core.c 		if ((u64)dma & HIGH_BITS_MASK)
dma               820 drivers/remoteproc/remoteproc_core.c 		mem->da = (u32)dma;
dma               823 drivers/remoteproc/remoteproc_core.c 	mem->dma = dma;
dma               831 drivers/remoteproc/remoteproc_core.c 	dma_free_coherent(dev->parent, mem->len, va, dma);
dma               849 drivers/remoteproc/remoteproc_core.c 	dma_free_coherent(dev->parent, mem->len, mem->va, mem->dma);
dma               961 drivers/remoteproc/remoteproc_core.c 		     void *va, dma_addr_t dma, int len, u32 da,
dma               974 drivers/remoteproc/remoteproc_core.c 	mem->dma = dma;
dma              1216 drivers/remoteproc/remoteproc_core.c 				pa = (u64)entry->dma;
dma               294 drivers/remoteproc/remoteproc_debugfs.c 		seq_printf(seq, "\tDMA address: %pad\n", &carveout->dma);
dma               366 drivers/remoteproc/remoteproc_virtio.c 				pa = (phys_addr_t)mem->dma;
dma                98 drivers/remoteproc/st_remoteproc.c 	va = ioremap_wc(mem->dma, mem->len);
dma               101 drivers/remoteproc/st_remoteproc.c 			&mem->dma, mem->len);
dma               100 drivers/remoteproc/stm32_rproc.c 	dev_dbg(dev, "map memory: %pa+%x\n", &mem->dma, mem->len);
dma               101 drivers/remoteproc/stm32_rproc.c 	va = ioremap_wc(mem->dma, mem->len);
dma               104 drivers/remoteproc/stm32_rproc.c 			&mem->dma, mem->len);
dma               117 drivers/remoteproc/stm32_rproc.c 	dev_dbg(rproc->dev.parent, "unmap memory: %pa\n", &mem->dma);
dma               267 drivers/scsi/aha152x.h     unsigned dma:1;         /* Transfer mode: 0=PIO; 1=DMA */
dma               280 drivers/scsi/aha152x.h #define cf_dma        fields.dma
dma                89 drivers/scsi/aha1740.c 					  dma_addr_t dma)
dma                94 drivers/scsi/aha1740.c 	offset = dma - hdata->ecb_dma_addr;
dma                50 drivers/scsi/aic94xx/aic94xx_task.c 		dma_addr_t dma = dma_map_single(&asd_ha->pcidev->dev, p,
dma                53 drivers/scsi/aic94xx/aic94xx_task.c 		sg_arr[0].bus_addr = cpu_to_le64((u64)dma);
dma               125 drivers/scsi/aic94xx/aic94xx_task.c 		dma_addr_t dma = (dma_addr_t)
dma               127 drivers/scsi/aic94xx/aic94xx_task.c 		dma_unmap_single(&ascb->ha->pcidev->dev, dma,
dma               243 drivers/scsi/arm/acornscsi.c 	printk("DMA @%06x, ", host->dma.start_addr);
dma               246 drivers/scsi/arm/acornscsi.c 	printk("DT @+%04x ST @+%04x", host->dma.transferred,
dma               739 drivers/scsi/arm/acornscsi.c     host->dma.xfer_setup = 0;
dma               740 drivers/scsi/arm/acornscsi.c     host->dma.xfer_required = 0;
dma               741 drivers/scsi/arm/acornscsi.c     host->dma.xfer_done = 0;
dma               822 drivers/scsi/arm/acornscsi.c 			    host->scsi.SCp.scsi_xferred != host->dma.transferred)
dma               838 drivers/scsi/arm/acornscsi.c 		if (host->dma.xfer_done)
dma               889 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_done = 1;
dma              1017 drivers/scsi/arm/acornscsi.c     host->dma.direction = direction;
dma              1038 drivers/scsi/arm/acornscsi.c 	host->dma.start_addr = address = host->dma.free_addr;
dma              1039 drivers/scsi/arm/acornscsi.c 	host->dma.free_addr = (host->dma.free_addr + length) &
dma              1046 drivers/scsi/arm/acornscsi.c 	    acornscsi_data_write(host, host->scsi.SCp.ptr, host->dma.start_addr,
dma              1061 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_setup = 1;
dma              1082 drivers/scsi/arm/acornscsi.c     if (host->dma.xfer_required) {
dma              1083 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_required = 0;
dma              1084 drivers/scsi/arm/acornscsi.c 	if (host->dma.direction == DMA_IN)
dma              1085 drivers/scsi/arm/acornscsi.c 	    acornscsi_data_read(host, host->dma.xfer_ptr,
dma              1086 drivers/scsi/arm/acornscsi.c 				 host->dma.xfer_start, host->dma.xfer_length);
dma              1092 drivers/scsi/arm/acornscsi.c     if (host->dma.xfer_setup) {
dma              1095 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_setup = 0;
dma              1104 drivers/scsi/arm/acornscsi.c 	transferred = dmac_address(host) - host->dma.start_addr;
dma              1105 drivers/scsi/arm/acornscsi.c 	host->dma.transferred += transferred;
dma              1107 drivers/scsi/arm/acornscsi.c 	if (host->dma.direction == DMA_IN)
dma              1109 drivers/scsi/arm/acornscsi.c 				 host->dma.start_addr, transferred);
dma              1146 drivers/scsi/arm/acornscsi.c     transferred = dmac_address(host) - host->dma.start_addr;
dma              1147 drivers/scsi/arm/acornscsi.c     host->dma.transferred += transferred;
dma              1152 drivers/scsi/arm/acornscsi.c     if (host->dma.direction == DMA_IN) {
dma              1153 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_start = host->dma.start_addr;
dma              1154 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_length = transferred;
dma              1155 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_ptr = host->scsi.SCp.ptr;
dma              1156 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_required = 1;
dma              1166 drivers/scsi/arm/acornscsi.c 	host->dma.start_addr = address = host->dma.free_addr;
dma              1167 drivers/scsi/arm/acornscsi.c 	host->dma.free_addr = (host->dma.free_addr + length) &
dma              1173 drivers/scsi/arm/acornscsi.c 	if (host->dma.direction == DMA_OUT)
dma              1174 drivers/scsi/arm/acornscsi.c 	    acornscsi_data_write(host, host->scsi.SCp.ptr, host->dma.start_addr,
dma              1189 drivers/scsi/arm/acornscsi.c 	host->dma.xfer_setup = 0;
dma              1219 drivers/scsi/arm/acornscsi.c     host->dma.xfer_required = 0;
dma              1221 drivers/scsi/arm/acornscsi.c     if (host->dma.direction == DMA_IN)
dma              1222 drivers/scsi/arm/acornscsi.c 	acornscsi_data_read(host, host->dma.xfer_ptr,
dma              1223 drivers/scsi/arm/acornscsi.c 				host->dma.xfer_start, host->dma.xfer_length);
dma              1235 drivers/scsi/arm/acornscsi.c     if (host->dma.xfer_setup) {
dma              1251 drivers/scsi/arm/acornscsi.c 	transferred = host->scsi.SCp.scsi_xferred - host->dma.transferred;
dma              1256 drivers/scsi/arm/acornscsi.c 	    host->dma.xfer_setup = 0;
dma              1258 drivers/scsi/arm/acornscsi.c 	    transferred += host->dma.start_addr;
dma              1873 drivers/scsi/arm/acornscsi.c     host->dma.transferred = host->scsi.SCp.scsi_xferred;
dma              1976 drivers/scsi/arm/acornscsi.c 	    host->dma.transferred = host->scsi.SCp.scsi_xferred;
dma              2444 drivers/scsi/arm/acornscsi.c 	if (host->dma.xfer_required)
dma               339 drivers/scsi/arm/acornscsi.h     } dma;
dma               282 drivers/scsi/arm/arxescsi.c 	info->info.scsi.dma		= NO_DMA;
dma               292 drivers/scsi/arm/arxescsi.c 	info->info.dma.setup		= arxescsi_dma_setup;
dma               293 drivers/scsi/arm/arxescsi.c 	info->info.dma.pseudo		= arxescsi_dma_pseudo;
dma               294 drivers/scsi/arm/arxescsi.c 	info->info.dma.stop		= arxescsi_dma_stop;
dma                49 drivers/scsi/arm/cumana_1.c   u8 __iomem *dma = hostdata->pdma_io + 0x2000;
dma                64 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                65 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                66 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                67 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                68 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                69 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                70 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                71 drivers/scsi/arm/cumana_1.c     v=*laddr++; writew(L(v), dma); writew(H(v), dma);
dma                88 drivers/scsi/arm/cumana_1.c       writeb(*addr++, dma);
dma                98 drivers/scsi/arm/cumana_1.c       writeb(*addr++, dma);
dma               116 drivers/scsi/arm/cumana_1.c   u8 __iomem *dma = hostdata->pdma_io + 0x2000;
dma               130 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               131 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               132 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               133 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               134 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               135 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               136 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               137 drivers/scsi/arm/cumana_1.c     *laddr++ = readw(dma) | (readw(dma) << 16);
dma               154 drivers/scsi/arm/cumana_1.c       *addr++ = readb(dma);
dma               164 drivers/scsi/arm/cumana_1.c       *addr++ = readb(dma);
dma               300 drivers/scsi/arm/cumana_1.c 	void __iomem *dma = priv(host)->pdma_io;
dma               309 drivers/scsi/arm/cumana_1.c 	iounmap(dma);
dma               159 drivers/scsi/arm/cumana_2.c 	int dmach = info->info.scsi.dma;
dma               283 drivers/scsi/arm/cumana_2.c 	if (info->info.scsi.dma != NO_DMA) {
dma               285 drivers/scsi/arm/cumana_2.c 		disable_dma(info->info.scsi.dma);
dma               406 drivers/scsi/arm/cumana_2.c 	info->info.scsi.dma		= ec->dma;
dma               415 drivers/scsi/arm/cumana_2.c 	info->info.dma.setup		= cumanascsi_2_dma_setup;
dma               416 drivers/scsi/arm/cumana_2.c 	info->info.dma.pseudo		= cumanascsi_2_dma_pseudo;
dma               417 drivers/scsi/arm/cumana_2.c 	info->info.dma.stop		= cumanascsi_2_dma_stop;
dma               436 drivers/scsi/arm/cumana_2.c 	if (info->info.scsi.dma != NO_DMA) {
dma               437 drivers/scsi/arm/cumana_2.c 		if (request_dma(info->info.scsi.dma, "cumanascsi2")) {
dma               439 drivers/scsi/arm/cumana_2.c 			       host->host_no, info->info.scsi.dma);
dma               440 drivers/scsi/arm/cumana_2.c 			info->info.scsi.dma = NO_DMA;
dma               442 drivers/scsi/arm/cumana_2.c 			set_dma_speed(info->info.scsi.dma, 180);
dma               451 drivers/scsi/arm/cumana_2.c 	if (info->info.scsi.dma != NO_DMA)
dma               452 drivers/scsi/arm/cumana_2.c 		free_dma(info->info.scsi.dma);
dma               476 drivers/scsi/arm/cumana_2.c 	if (info->info.scsi.dma != NO_DMA)
dma               477 drivers/scsi/arm/cumana_2.c 		free_dma(info->info.scsi.dma);
dma               160 drivers/scsi/arm/eesox.c 	int dmach = info->info.scsi.dma;
dma               370 drivers/scsi/arm/eesox.c 	if (info->info.scsi.dma != NO_DMA)
dma               371 drivers/scsi/arm/eesox.c 		disable_dma(info->info.scsi.dma);
dma               525 drivers/scsi/arm/eesox.c 	info->info.scsi.dma		= ec->dma;
dma               534 drivers/scsi/arm/eesox.c 	info->info.dma.setup		= eesoxscsi_dma_setup;
dma               535 drivers/scsi/arm/eesox.c 	info->info.dma.pseudo		= eesoxscsi_dma_pseudo;
dma               536 drivers/scsi/arm/eesox.c 	info->info.dma.stop		= eesoxscsi_dma_stop;
dma               556 drivers/scsi/arm/eesox.c 	if (info->info.scsi.dma != NO_DMA) {
dma               557 drivers/scsi/arm/eesox.c 		if (request_dma(info->info.scsi.dma, "eesox")) {
dma               559 drivers/scsi/arm/eesox.c 			       host->host_no, info->info.scsi.dma);
dma               560 drivers/scsi/arm/eesox.c 			info->info.scsi.dma = NO_DMA;
dma               562 drivers/scsi/arm/eesox.c 			set_dma_speed(info->info.scsi.dma, 180);
dma               572 drivers/scsi/arm/eesox.c 	if (info->info.scsi.dma != NO_DMA)
dma               573 drivers/scsi/arm/eesox.c 		free_dma(info->info.scsi.dma);
dma               598 drivers/scsi/arm/eesox.c 	if (info->info.scsi.dma != NO_DMA)
dma               599 drivers/scsi/arm/eesox.c 		free_dma(info->info.scsi.dma);
dma               220 drivers/scsi/arm/fas216.c 		info->dma.transfer_type, info->dma.setup,
dma               221 drivers/scsi/arm/fas216.c 		info->dma.pseudo, info->dma.stop);
dma               706 drivers/scsi/arm/fas216.c 	fasdmatype_t dmatype = info->dma.transfer_type;
dma               708 drivers/scsi/arm/fas216.c 	info->dma.transfer_type = fasdma_none;
dma               780 drivers/scsi/arm/fas216.c 	if (info->dma.setup)
dma               781 drivers/scsi/arm/fas216.c 		dmatype = info->dma.setup(info->host, &info->scsi.SCp,
dma               783 drivers/scsi/arm/fas216.c 	info->dma.transfer_type = dmatype;
dma               802 drivers/scsi/arm/fas216.c 		info->dma.pseudo(info->host, &info->scsi.SCp,
dma               833 drivers/scsi/arm/fas216.c 	if (info->dma.transfer_type == fasdma_real_all ||
dma               834 drivers/scsi/arm/fas216.c 	    info->dma.transfer_type == fasdma_real_block)
dma               835 drivers/scsi/arm/fas216.c 		info->dma.stop(info->host, &info->scsi.SCp);
dma               866 drivers/scsi/arm/fas216.c 	if (info->dma.transfer_type == fasdma_real_all ||
dma               867 drivers/scsi/arm/fas216.c 	    info->dma.transfer_type == fasdma_real_block)
dma               868 drivers/scsi/arm/fas216.c 		info->dma.stop(info->host, &info->scsi.SCp);
dma               870 drivers/scsi/arm/fas216.c 	info->dma.transfer_type = fasdma_none;
dma              1771 drivers/scsi/arm/fas216.c 	info->dma.transfer_type = fasdma_none;
dma              1861 drivers/scsi/arm/fas216.c 	info->dma.transfer_type = fasdma_none;
dma              2969 drivers/scsi/arm/fas216.c 			info->scsi.irq, info->scsi.dma);
dma               239 drivers/scsi/arm/fas216.h 		int		dma;			/* dma channel				*/
dma               305 drivers/scsi/arm/fas216.h 	} dma;
dma               133 drivers/scsi/arm/powertec.c 	int dmach = info->info.scsi.dma;
dma               173 drivers/scsi/arm/powertec.c 	if (info->info.scsi.dma != NO_DMA)
dma               174 drivers/scsi/arm/powertec.c 		disable_dma(info->info.scsi.dma);
dma               332 drivers/scsi/arm/powertec.c 	info->info.scsi.dma		= ec->dma;
dma               341 drivers/scsi/arm/powertec.c 	info->info.dma.setup		= powertecscsi_dma_setup;
dma               342 drivers/scsi/arm/powertec.c 	info->info.dma.pseudo		= NULL;
dma               343 drivers/scsi/arm/powertec.c 	info->info.dma.stop		= powertecscsi_dma_stop;
dma               364 drivers/scsi/arm/powertec.c 	if (info->info.scsi.dma != NO_DMA) {
dma               365 drivers/scsi/arm/powertec.c 		if (request_dma(info->info.scsi.dma, "powertec")) {
dma               367 drivers/scsi/arm/powertec.c 			       host->host_no, info->info.scsi.dma);
dma               368 drivers/scsi/arm/powertec.c 			info->info.scsi.dma = NO_DMA;
dma               370 drivers/scsi/arm/powertec.c 			set_dma_speed(info->info.scsi.dma, 180);
dma               379 drivers/scsi/arm/powertec.c 	if (info->info.scsi.dma != NO_DMA)
dma               380 drivers/scsi/arm/powertec.c 		free_dma(info->info.scsi.dma);
dma               407 drivers/scsi/arm/powertec.c 	if (info->info.scsi.dma != NO_DMA)
dma               408 drivers/scsi/arm/powertec.c 		free_dma(info->info.scsi.dma);
dma                26 drivers/scsi/be2iscsi/be.h 	dma_addr_t dma;
dma               286 drivers/scsi/be2iscsi/be_cmds.c 			tag_mem->dma = mbx_cmd_mem->dma;
dma               519 drivers/scsi/be2iscsi/be_cmds.c 					tag_mem->va, tag_mem->dma);
dma               649 drivers/scsi/be2iscsi/be_cmds.c 	val |= (upper_32_bits(mbox_mem->dma) >> 2) << 2;
dma               659 drivers/scsi/be2iscsi/be_cmds.c 	val |= (u32) (mbox_mem->dma >> 4) << 2;
dma               698 drivers/scsi/be2iscsi/be_cmds.c 	u64 dma = (u64) mem->dma;
dma               702 drivers/scsi/be2iscsi/be_cmds.c 		pages[i].lo = cpu_to_le32(dma & 0xFFFFFFFF);
dma               703 drivers/scsi/be2iscsi/be_cmds.c 		pages[i].hi = cpu_to_le32(upper_32_bits(dma));
dma               704 drivers/scsi/be2iscsi/be_cmds.c 		dma += PAGE_SIZE_4K;
dma              1194 drivers/scsi/be2iscsi/be_cmds.c 		q_mem->dma = q_mem->dma + (req->num_pages * PAGE_SIZE);
dma              1269 drivers/scsi/be2iscsi/be_cmds.c 				&nonemb_cmd.dma, GFP_KERNEL);
dma              1284 drivers/scsi/be2iscsi/be_cmds.c 	sge->pa_hi = cpu_to_le32(upper_32_bits(nonemb_cmd.dma));
dma              1285 drivers/scsi/be2iscsi/be_cmds.c 	sge->pa_lo = cpu_to_le32(nonemb_cmd.dma & 0xFFFFFFFF);
dma              1313 drivers/scsi/be2iscsi/be_cmds.c 				    nonemb_cmd.va, nonemb_cmd.dma);
dma              1073 drivers/scsi/be2iscsi/be_iscsi.c 				&nonemb_cmd.dma, GFP_KERNEL);
dma              1092 drivers/scsi/be2iscsi/be_iscsi.c 				    nonemb_cmd.va, nonemb_cmd.dma);
dma              1106 drivers/scsi/be2iscsi/be_iscsi.c 					nonemb_cmd.dma);
dma              1120 drivers/scsi/be2iscsi/be_iscsi.c 			    nonemb_cmd.va, nonemb_cmd.dma);
dma               538 drivers/scsi/be2iscsi/be_main.c 			mbox_mem_alloc->size, &mbox_mem_alloc->dma, GFP_KERNEL);
dma               546 drivers/scsi/be2iscsi/be_main.c 	mbox_mem_align->dma = PTR_ALIGN(mbox_mem_alloc->dma, 16);
dma              2943 drivers/scsi/be2iscsi/be_main.c 	sgl->dma = (unsigned long)physical_address;
dma              3032 drivers/scsi/be2iscsi/be_main.c 		mem->dma = paddr;
dma              3055 drivers/scsi/be2iscsi/be_main.c 					    mem->va, mem->dma);
dma              3098 drivers/scsi/be2iscsi/be_main.c 		mem->dma = paddr;
dma              3120 drivers/scsi/be2iscsi/be_main.c 					    mem->va, mem->dma);
dma              3156 drivers/scsi/be2iscsi/be_main.c 	mem->dma = (unsigned long)mem_descr->mem_array[idx].
dma              3209 drivers/scsi/be2iscsi/be_main.c 	mem->dma = (unsigned long)mem_descr->mem_array[idx].
dma              3311 drivers/scsi/be2iscsi/be_main.c 			mem->va, mem->dma);
dma              3325 drivers/scsi/be2iscsi/be_main.c 	mem->va = dma_alloc_coherent(&phba->pcidev->dev, mem->size, &mem->dma,
dma              3467 drivers/scsi/be2iscsi/be_main.c 						    ptag_mem->dma);
dma              4839 drivers/scsi/be2iscsi/be_main.c 					&nonemb_cmd.dma, GFP_KERNEL);
dma              4853 drivers/scsi/be2iscsi/be_main.c 					    nonemb_cmd.va, nonemb_cmd.dma);
dma              4867 drivers/scsi/be2iscsi/be_main.c 					    nonemb_cmd.va, nonemb_cmd.dma);
dma              4884 drivers/scsi/be2iscsi/be_main.c 				    nonemb_cmd.va, nonemb_cmd.dma);
dma              5742 drivers/scsi/be2iscsi/be_main.c 			    phba->ctrl.mbox_mem_alloced.dma);
dma              5786 drivers/scsi/be2iscsi/be_main.c 			    phba->ctrl.mbox_mem_alloced.dma);
dma                88 drivers/scsi/be2iscsi/be_mgmt.c 	mcc_sge->pa_hi = cpu_to_le32(upper_32_bits(nonemb_cmd->dma));
dma                89 drivers/scsi/be2iscsi/be_mgmt.c 	mcc_sge->pa_lo = cpu_to_le32(nonemb_cmd->dma & 0xFFFFFFFF);
dma               197 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_hi = cpu_to_le32(upper_32_bits(nonemb_cmd->dma));
dma               198 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_lo = cpu_to_le32(nonemb_cmd->dma & 0xFFFFFFFF);
dma               243 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_hi = cpu_to_le32(upper_32_bits(nonemb_cmd->dma));
dma               244 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_lo = cpu_to_le32(lower_32_bits(nonemb_cmd->dma));
dma               257 drivers/scsi/be2iscsi/be_mgmt.c 		tag_mem->dma = nonemb_cmd->dma;
dma               288 drivers/scsi/be2iscsi/be_mgmt.c 			    nonemb_cmd->va, nonemb_cmd->dma);
dma               296 drivers/scsi/be2iscsi/be_mgmt.c 	cmd->va = dma_alloc_coherent(&phba->ctrl.pdev->dev, size, &cmd->dma,
dma               320 drivers/scsi/be2iscsi/be_mgmt.c 				    tag_mem->va, tag_mem->dma);
dma               768 drivers/scsi/be2iscsi/be_mgmt.c 						    nonemb_cmd.dma);
dma               786 drivers/scsi/be2iscsi/be_mgmt.c 					    nonemb_cmd.dma);
dma               874 drivers/scsi/be2iscsi/be_mgmt.c 				    bs->nonemb_cmd.va, bs->nonemb_cmd.dma);
dma              1018 drivers/scsi/be2iscsi/be_mgmt.c 					      &nonemb_cmd->dma,
dma              1033 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_hi = cpu_to_le32(upper_32_bits(nonemb_cmd->dma));
dma              1034 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_lo = cpu_to_le32(nonemb_cmd->dma & 0xFFFFFFFF);
dma              1514 drivers/scsi/be2iscsi/be_mgmt.c 					   nonemb_cmd.size, &nonemb_cmd.dma,
dma              1527 drivers/scsi/be2iscsi/be_mgmt.c 				    nonemb_cmd.va, nonemb_cmd.dma);
dma              1544 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_hi = cpu_to_le32(upper_32_bits(nonemb_cmd.dma));
dma              1545 drivers/scsi/be2iscsi/be_mgmt.c 	sge->pa_lo = cpu_to_le32(lower_32_bits(nonemb_cmd.dma));
dma              1554 drivers/scsi/be2iscsi/be_mgmt.c 				    nonemb_cmd.va, nonemb_cmd.dma);
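
Note: the be2iscsi entries split each nonemb_cmd DMA address into 32-bit halves for the firmware SGE (pa_hi/pa_lo). A minimal sketch of that split; struct ex_sge is a hypothetical stand-in for the driver's SGE layout:

#include <linux/kernel.h>
#include <linux/dma-mapping.h>

struct ex_sge {
	__le32 pa_hi;
	__le32 pa_lo;
	__le32 len;
};

static void example_fill_sge(struct ex_sge *sge, dma_addr_t dma, u32 len)
{
	/* High and low 32 bits of the bus address, little-endian on the wire. */
	sge->pa_hi = cpu_to_le32(upper_32_bits((u64)dma));
	sge->pa_lo = cpu_to_le32(lower_32_bits((u64)dma));
	sge->len   = cpu_to_le32(len);
}
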
dma              1842 drivers/scsi/bfa/bfa_core.c 	dma_info->dma_curp = dma_info->dma;
dma              1850 drivers/scsi/bfa/bfa_core.c 		dma_elem->dma_curp = dma_elem->dma;
dma                87 drivers/scsi/bfa/bfa_ioc.h 	u64		dma;		/* dma address if DMA memory */
dma               544 drivers/scsi/bfa/bfad.c 				(dma_addr_t) dma_elem->dma);
dma               624 drivers/scsi/bfa/bfad.c 		dma_elem->dma = phys_addr;
dma               526 drivers/scsi/esp_scsi.h 	void			*dma;
dma              1457 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c 			 vscsi->dds.window[LOCAL].liobn, iue->sbuf->dma);
dma              1995 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c 			 vscsi->dds.window[LOCAL].liobn, iue->sbuf->dma,
dma              2127 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c 			 iue->sbuf->dma, vscsi->dds.window[REMOTE].liobn,
dma              2184 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c 			 iue->sbuf->dma, vscsi->dds.window[REMOTE].liobn,
dma              3174 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c 	rc = h_copy_rdma(len, vscsi->dds.window[LOCAL].liobn, iue->sbuf->dma,
dma                72 drivers/scsi/ibmvscsi_tgt/libsrp.c 		ring[i]->buf = dma_alloc_coherent(dev, size, &ring[i]->dma,
dma                83 drivers/scsi/ibmvscsi_tgt/libsrp.c 					  ring[i]->dma);
dma                98 drivers/scsi/ibmvscsi_tgt/libsrp.c 		dma_free_coherent(dev, size, ring[i]->buf, ring[i]->dma);
dma                72 drivers/scsi/ibmvscsi_tgt/libsrp.h 	dma_addr_t dma;
dma              2282 drivers/scsi/isci/host.c 		dma_addr_t dma;
dma              2284 drivers/scsi/isci/host.c 		ireq = dmam_alloc_coherent(dev, sizeof(*ireq), &dma, GFP_KERNEL);
dma              2290 drivers/scsi/isci/host.c 		ireq->request_daddr = dma;
dma                64 drivers/scsi/isci/unsolicited_frame_control.c 	dma_addr_t dma = ihost->ufi_dma;
dma                84 drivers/scsi/isci/unsolicited_frame_control.c 	uf_control->headers.physical_address = dma + SCI_UFI_BUF_SIZE;
dma                94 drivers/scsi/isci/unsolicited_frame_control.c 	uf_control->address_table.physical_address = dma + SCI_UFI_BUF_SIZE + SCI_UFI_HDR_SIZE;
dma               112 drivers/scsi/isci/unsolicited_frame_control.c 		uf_control->address_table.array[i] = dma;
dma               124 drivers/scsi/isci/unsolicited_frame_control.c 		dma += SCU_UNSOLICITED_FRAME_BUFFER_SIZE;
dma               133 drivers/scsi/lpfc/lpfc_bsg.c 	struct lpfc_dmabuf dma;
dma               891 drivers/scsi/lpfc/lpfc_bsg.c 	list_add_tail(&head, &mlist->dma.list);
dma               894 drivers/scsi/lpfc/lpfc_bsg.c 		mlast = list_entry(curr, struct lpfc_dmabufext , dma.list);
dma               895 drivers/scsi/lpfc/lpfc_bsg.c 		if (mlast->dma.virt)
dma               898 drivers/scsi/lpfc/lpfc_bsg.c 					  mlast->dma.virt,
dma               899 drivers/scsi/lpfc/lpfc_bsg.c 					  mlast->dma.phys);
dma              2898 drivers/scsi/lpfc/lpfc_bsg.c 		INIT_LIST_HEAD(&dmp->dma.list);
dma              2902 drivers/scsi/lpfc/lpfc_bsg.c 			list_add_tail(&dmp->dma.list, &mlist->dma.list);
dma              2907 drivers/scsi/lpfc/lpfc_bsg.c 		dmp->dma.virt = dma_alloc_coherent(&pcidev->dev,
dma              2909 drivers/scsi/lpfc/lpfc_bsg.c 						   &(dmp->dma.phys),
dma              2912 drivers/scsi/lpfc/lpfc_bsg.c 		if (!dmp->dma.virt)
dma              2920 drivers/scsi/lpfc/lpfc_bsg.c 			memset((uint8_t *)dmp->dma.virt, 0, cnt);
dma              2925 drivers/scsi/lpfc/lpfc_bsg.c 		bpl->addrLow = le32_to_cpu(putPaddrLow(dmp->dma.phys));
dma              2926 drivers/scsi/lpfc/lpfc_bsg.c 		bpl->addrHigh = le32_to_cpu(putPaddrHigh(dmp->dma.phys));
dma              2991 drivers/scsi/lpfc/lpfc_bsg.c 	dmp = &rxbuffer->dma;
dma              3253 drivers/scsi/lpfc/lpfc_bsg.c 	list_add_tail(&head, &txbuffer->dma.list);
dma               382 drivers/scsi/lpfc/lpfc_crtn.h void lpfc_nvmet_buf_free(struct lpfc_hba *phba, void *virtp, dma_addr_t dma);
dma               401 drivers/scsi/lpfc/lpfc_mem.c __lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_t dma)
dma               407 drivers/scsi/lpfc/lpfc_mem.c 		pool->elements[pool->current_count].phys = dma;
dma               410 drivers/scsi/lpfc/lpfc_mem.c 		dma_pool_free(phba->lpfc_mbuf_pool, virt, dma);
dma               429 drivers/scsi/lpfc/lpfc_mem.c lpfc_mbuf_free(struct lpfc_hba * phba, void *virt, dma_addr_t dma)
dma               434 drivers/scsi/lpfc/lpfc_mem.c 	__lpfc_mbuf_free(phba, virt, dma);
dma               472 drivers/scsi/lpfc/lpfc_mem.c lpfc_nvmet_buf_free(struct lpfc_hba *phba, void *virt, dma_addr_t dma)
dma               474 drivers/scsi/lpfc/lpfc_mem.c 	dma_pool_free(phba->lpfc_sg_dma_buf_pool, virt, dma);
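
Note: the lpfc entries return small DMA buffers to a dma_pool (dma_pool_free() takes both the virtual address and its bus address). A minimal sketch of the pool lifecycle behind that pattern; the pool name, size and alignment here are illustrative:

#include <linux/gfp.h>
#include <linux/dmapool.h>

static struct dma_pool *example_create_pool(struct device *dev, size_t size)
{
	/* 16-byte alignment, no boundary restriction. */
	return dma_pool_create("example_pool", dev, size, 16, 0);
}

static void *example_get_buf(struct dma_pool *pool, dma_addr_t *dma)
{
	return dma_pool_alloc(pool, GFP_KERNEL, dma);
}

static void example_put_buf(struct dma_pool *pool, void *virt, dma_addr_t dma)
{
	dma_pool_free(pool, virt, dma);
}
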
dma                47 drivers/scsi/mac53c94.c 	struct	dbdma_regs __iomem *dma;
dma               109 drivers/scsi/mac53c94.c 	struct dbdma_regs __iomem *dma = state->dma;
dma               114 drivers/scsi/mac53c94.c 	writel((RUN|PAUSE|FLUSH|WAKE) << 16, &dma->control);
dma               129 drivers/scsi/mac53c94.c 	struct dbdma_regs __iomem *dma = state->dma;
dma               140 drivers/scsi/mac53c94.c 	writel((RUN|PAUSE|FLUSH|WAKE) << 16, &dma->control);
dma               198 drivers/scsi/mac53c94.c 	struct dbdma_regs __iomem *dma = state->dma;
dma               220 drivers/scsi/mac53c94.c 		writel(RUN << 16, &dma->control);	/* stop dma */
dma               276 drivers/scsi/mac53c94.c 			writel(virt_to_phys(state->dma_cmds), &dma->cmdptr);
dma               277 drivers/scsi/mac53c94.c 			writel((RUN << 16) | RUN, &dma->control);
dma               315 drivers/scsi/mac53c94.c 		writel(RUN << 16, &dma->control);	/* stop dma */
dma               448 drivers/scsi/mac53c94.c 	state->dma = (struct dbdma_regs __iomem *)
dma               451 drivers/scsi/mac53c94.c 	if (state->regs == NULL || state->dma == NULL) {
dma               502 drivers/scsi/mac53c94.c 	if (state->dma != NULL)
dma               503 drivers/scsi/mac53c94.c 		iounmap(state->dma);
dma               524 drivers/scsi/mac53c94.c 	if (fp->dma)
dma               525 drivers/scsi/mac53c94.c 		iounmap(fp->dma);
dma               147 drivers/scsi/mesh.c 	volatile struct	dbdma_regs __iomem *dma;
dma               306 drivers/scsi/mesh.c 	volatile struct dbdma_regs __iomem *md = ms->dma;
dma               361 drivers/scsi/mesh.c 	volatile struct dbdma_regs __iomem *md = ms->dma;
dma               683 drivers/scsi/mesh.c 	volatile struct dbdma_regs __iomem *md = ms->dma;
dma              1321 drivers/scsi/mesh.c 	volatile struct dbdma_regs __iomem *md = ms->dma;
dma              1708 drivers/scsi/mesh.c 	volatile struct dbdma_regs __iomem *md = ms->dma;
dma              1901 drivers/scsi/mesh.c 	ms->dma = ioremap(macio_resource_start(mdev, 1), 0x1000);
dma              1902 drivers/scsi/mesh.c 	if (ms->dma == NULL) {
dma              1982 drivers/scsi/mesh.c 	iounmap(ms->dma);
dma              2009 drivers/scsi/mesh.c        	iounmap(ms->dma);
dma              1132 drivers/scsi/pmcraid.c 	dma_addr_t dma;
dma              1145 drivers/scsi/pmcraid.c 		dma = pinstance->ccn.baddr + PMCRAID_AEN_HDR_SIZE;
dma              1150 drivers/scsi/pmcraid.c 		dma = pinstance->ldn.baddr + PMCRAID_AEN_HDR_SIZE;
dma              1176 drivers/scsi/pmcraid.c 	ioadl[0].address = cpu_to_le64(dma);
dma              3436 drivers/scsi/qla2xxx/qla_def.h 	dma_addr_t  dma;
dma              3463 drivers/scsi/qla2xxx/qla_def.h 	dma_addr_t  dma;
dma              3941 drivers/scsi/qla2xxx/qla_init.c 	put_unaligned_le64(req->dma, &ha->init_cb->request_q_address);
dma              3942 drivers/scsi/qla2xxx/qla_init.c 	put_unaligned_le64(rsp->dma, &ha->init_cb->response_q_address);
dma              3969 drivers/scsi/qla2xxx/qla_init.c 	put_unaligned_le64(req->dma, &icb->request_q_address);
dma              3970 drivers/scsi/qla2xxx/qla_init.c 	put_unaligned_le64(rsp->dma, &icb->response_q_address);
dma              7681 drivers/scsi/qla2xxx/qla_init.c 			rval = qla2x00_load_ram(vha, req->dma, risc_addr, dlen);
dma              7847 drivers/scsi/qla2xxx/qla_init.c 			rval = qla2x00_load_ram(vha, req->dma, risc_addr,
dma              7938 drivers/scsi/qla2xxx/qla_init.c 			rval = qla2x00_load_ram(vha, req->dma, risc_addr, dlen);
dma              4354 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[2] = MSW(LSD(req->dma));
dma              4355 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[3] = LSW(LSD(req->dma));
dma              4356 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[6] = MSW(MSD(req->dma));
dma              4357 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[7] = LSW(MSD(req->dma));
dma              4427 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[2] = MSW(LSD(rsp->dma));
dma              4428 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[3] = LSW(LSD(rsp->dma));
dma              4429 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[6] = MSW(MSD(rsp->dma));
dma              4430 drivers/scsi/qla2xxx/qla_mbx.c 	mcp->mb[7] = LSW(MSD(rsp->dma));
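The qla_mbx.c lines above hand a 64-bit ring address to firmware as four 16-bit mailbox words via the driver's MSW/LSW/MSD/LSD macros. An equivalent sketch using the generic lower_32_bits()/upper_32_bits() helpers (the mailbox indices are copied from the listing, the rest is illustrative):

#include <linux/kernel.h>
#include <linux/types.h>

static void fill_queue_address(u16 mb[8], dma_addr_t dma)
{
	u32 lo = lower_32_bits(dma);
	u32 hi = upper_32_bits(dma);

	mb[2] = lo >> 16;	/* MSW of the low dword  */
	mb[3] = lo & 0xffff;	/* LSW of the low dword  */
	mb[6] = hi >> 16;	/* MSW of the high dword */
	mb[7] = hi & 0xffff;	/* LSW of the high dword */
}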
dma               561 drivers/scsi/qla2xxx/qla_mid.c 		sizeof(request_t), req->ring, req->dma);
dma               563 drivers/scsi/qla2xxx/qla_mid.c 	req->dma = 0;
dma               588 drivers/scsi/qla2xxx/qla_mid.c 		sizeof(response_t), rsp->ring, rsp->dma);
dma               590 drivers/scsi/qla2xxx/qla_mid.c 	rsp->dma = 0;
dma               704 drivers/scsi/qla2xxx/qla_mid.c 			&req->dma, GFP_KERNEL);
dma               833 drivers/scsi/qla2xxx/qla_mid.c 			&rsp->dma, GFP_KERNEL);
dma               842 drivers/scsi/qla2xxx/qla_mr.c 	req->dma_fx00 = req->dma;
dma               846 drivers/scsi/qla2xxx/qla_mr.c 	rsp->dma_fx00 = rsp->dma;
dma               869 drivers/scsi/qla2xxx/qla_mr.c 	req->dma = bar2_hdl + ha->req_que_off;
dma               880 drivers/scsi/qla2xxx/qla_mr.c 	    ha->req_que_off, (u64)req->dma);
dma               884 drivers/scsi/qla2xxx/qla_mr.c 	rsp->dma = bar2_hdl + ha->rsp_que_off;
dma               895 drivers/scsi/qla2xxx/qla_mr.c 	    ha->rsp_que_off, (u64)rsp->dma);
dma              1790 drivers/scsi/qla2xxx/qla_nx.c 	put_unaligned_le64(req->dma, &icb->request_q_address);
dma              1791 drivers/scsi/qla2xxx/qla_nx.c 	put_unaligned_le64(rsp->dma, &icb->response_q_address);
dma               462 drivers/scsi/qla2xxx/qla_os.c 		req->ring, req->dma);
dma               480 drivers/scsi/qla2xxx/qla_os.c 		rsp->ring, rsp->dma);
dma              4159 drivers/scsi/qla2xxx/qla_os.c 		&(*req)->dma, GFP_KERNEL);
dma              4176 drivers/scsi/qla2xxx/qla_os.c 		&(*rsp)->dma, GFP_KERNEL);
dma              4271 drivers/scsi/qla2xxx/qla_os.c 		sizeof(response_t), (*rsp)->ring, (*rsp)->dma);
dma              4273 drivers/scsi/qla2xxx/qla_os.c 	(*rsp)->dma = 0;
dma              4279 drivers/scsi/qla2xxx/qla_os.c 		sizeof(request_t), (*req)->ring, (*req)->dma);
dma              4281 drivers/scsi/qla2xxx/qla_os.c 	(*req)->dma = 0;
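qla_mid.c and qla_os.c above allocate the request/response rings from coherent DMA memory and clear the stored bus address after freeing. A minimal sketch of that pattern, with a placeholder ring structure:

#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct my_entry { u8 payload[64]; };	/* stand-in for request_t/response_t */

struct my_ring {
	struct my_entry	*ring;
	dma_addr_t	dma;
	u32		length;
};

static int my_ring_alloc(struct device *dev, struct my_ring *q, u32 entries)
{
	q->length = entries;
	q->ring = dma_alloc_coherent(dev, entries * sizeof(struct my_entry),
				     &q->dma, GFP_KERNEL);
	return q->ring ? 0 : -ENOMEM;
}

static void my_ring_free(struct device *dev, struct my_ring *q)
{
	if (q->ring)
		dma_free_coherent(dev, q->length * sizeof(struct my_entry),
				  q->ring, q->dma);
	q->ring = NULL;
	q->dma = 0;	/* as in the listing: forget the stale bus address */
}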
dma                36 drivers/scsi/sgiwd93.c 	dma_addr_t dma;
dma               127 drivers/scsi/sgiwd93.c 	hregs->ndptr = hdata->dma;
dma               179 drivers/scsi/sgiwd93.c 	dma_addr_t dma = hdata->dma;
dma               185 drivers/scsi/sgiwd93.c 		hcp->desc.pnext = (u32) (dma + sizeof(struct hpc_chunk));
dma               188 drivers/scsi/sgiwd93.c 		dma += sizeof(struct hpc_chunk);
dma               192 drivers/scsi/sgiwd93.c 	hcp->desc.pnext = hdata->dma;
dma               237 drivers/scsi/sgiwd93.c 	hdata->cpu = dma_alloc_attrs(&pdev->dev, HPC_DMA_SIZE, &hdata->dma,
dma               277 drivers/scsi/sgiwd93.c 	dma_free_attrs(&pdev->dev, HPC_DMA_SIZE, hdata->cpu, hdata->dma,
dma               294 drivers/scsi/sgiwd93.c 	dma_free_attrs(&pdev->dev, HPC_DMA_SIZE, hdata->cpu, hdata->dma,
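sgiwd93.c above lays its HPC descriptors out in one coherent allocation and links each descriptor to the bus address of the next, wrapping around at the end. A sketch with a placeholder descriptor type standing in for struct hpc_chunk:

#include <linux/dma-mapping.h>
#include <linux/types.h>

struct my_desc {
	u32 pnext;	/* bus address of the next descriptor (32-bit HW) */
	u32 flags;
};

static void my_init_chain(struct my_desc *cpu, dma_addr_t dma, int ndesc)
{
	int i;

	for (i = 0; i < ndesc - 1; i++) {
		cpu[i].pnext = (u32)(dma + (i + 1) * sizeof(struct my_desc));
		cpu[i].flags = 0;
	}
	/* last descriptor points back to the first: a circular chain */
	cpu[ndesc - 1].pnext = (u32)dma;
	cpu[ndesc - 1].flags = 0;
}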
dma              3889 drivers/scsi/st.c 	tb->dma = need_dma;
dma                38 drivers/scsi/st.h 	unsigned char dma;	/* DMA-able buffer */
dma                49 drivers/scsi/sun_esp.c 	esp->dma = dma_of;
dma               555 drivers/scsi/sun_esp.c 	struct platform_device *dma_of = esp->dma;
dma               665 drivers/scsi/wd33c93.c 				hostdata->dma = D_DMA_RUNNING;
dma               788 drivers/scsi/wd33c93.c 		hostdata->dma = D_DMA_RUNNING;
dma               832 drivers/scsi/wd33c93.c 	    if (hostdata->dma == D_DMA_RUNNING) {
dma               836 drivers/scsi/wd33c93.c 		hostdata->dma = D_DMA_OFF;
dma              1591 drivers/scsi/wd33c93.c 	hostdata->dma = D_DMA_OFF;
dma              1662 drivers/scsi/wd33c93.c 		if (hostdata->dma == D_DMA_RUNNING) {
dma              1664 drivers/scsi/wd33c93.c 			hostdata->dma = D_DMA_OFF;
dma              1965 drivers/scsi/wd33c93.c 	hostdata->dma = D_DMA_OFF;
dma               236 drivers/scsi/wd33c93.h     uchar            dma;              /* current state of DMA (on/off) */
dma               117 drivers/soc/ti/knav_dma.c 	struct knav_dma_device		*dma;
dma               264 drivers/soc/ti/knav_dma.c static void dma_hw_enable_all(struct knav_dma_device *dma)
dma               268 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->max_tx_chan; i++) {
dma               269 drivers/soc/ti/knav_dma.c 		writel_relaxed(0, &dma->reg_tx_chan[i].mode);
dma               270 drivers/soc/ti/knav_dma.c 		writel_relaxed(DMA_ENABLE, &dma->reg_tx_chan[i].control);
dma               275 drivers/soc/ti/knav_dma.c static void knav_dma_hw_init(struct knav_dma_device *dma)
dma               280 drivers/soc/ti/knav_dma.c 	spin_lock(&dma->lock);
dma               281 drivers/soc/ti/knav_dma.c 	v  = dma->loopback ? DMA_LOOPBACK : 0;
dma               282 drivers/soc/ti/knav_dma.c 	writel_relaxed(v, &dma->reg_global->emulation_control);
dma               284 drivers/soc/ti/knav_dma.c 	v = readl_relaxed(&dma->reg_global->perf_control);
dma               285 drivers/soc/ti/knav_dma.c 	v |= ((dma->rx_timeout & DMA_RX_TIMEOUT_MASK) << DMA_RX_TIMEOUT_SHIFT);
dma               286 drivers/soc/ti/knav_dma.c 	writel_relaxed(v, &dma->reg_global->perf_control);
dma               288 drivers/soc/ti/knav_dma.c 	v = ((dma->tx_priority << DMA_TX_PRIO_SHIFT) |
dma               289 drivers/soc/ti/knav_dma.c 	     (dma->rx_priority << DMA_RX_PRIO_SHIFT));
dma               291 drivers/soc/ti/knav_dma.c 	writel_relaxed(v, &dma->reg_global->priority_control);
dma               294 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->max_rx_chan; i++)
dma               295 drivers/soc/ti/knav_dma.c 		writel_relaxed(DMA_ENABLE, &dma->reg_rx_chan[i].control);
dma               297 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->logical_queue_managers; i++)
dma               298 drivers/soc/ti/knav_dma.c 		writel_relaxed(dma->qm_base_address[i],
dma               299 drivers/soc/ti/knav_dma.c 			       &dma->reg_global->qm_base_address[i]);
dma               300 drivers/soc/ti/knav_dma.c 	spin_unlock(&dma->lock);
dma               303 drivers/soc/ti/knav_dma.c static void knav_dma_hw_destroy(struct knav_dma_device *dma)
dma               308 drivers/soc/ti/knav_dma.c 	spin_lock(&dma->lock);
dma               311 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->max_rx_chan; i++)
dma               312 drivers/soc/ti/knav_dma.c 		writel_relaxed(v, &dma->reg_rx_chan[i].control);
dma               314 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->max_tx_chan; i++)
dma               315 drivers/soc/ti/knav_dma.c 		writel_relaxed(v, &dma->reg_tx_chan[i].control);
dma               316 drivers/soc/ti/knav_dma.c 	spin_unlock(&dma->lock);
dma               348 drivers/soc/ti/knav_dma.c 					struct knav_dma_device *dma)
dma               352 drivers/soc/ti/knav_dma.c 	list_for_each_entry(chan, &dma->chan_list, list) {
dma               360 drivers/soc/ti/knav_dma.c 	struct knav_dma_device *dma;
dma               362 drivers/soc/ti/knav_dma.c 	list_for_each_entry(dma, &kdev->list, list) {
dma               363 drivers/soc/ti/knav_dma.c 		if (atomic_read(&dma->ref_count)) {
dma               365 drivers/soc/ti/knav_dma.c 			dma->name, dma->max_tx_chan, dma->max_rx_flow);
dma               366 drivers/soc/ti/knav_dma.c 			dma_debug_show_devices(s, dma);
dma               429 drivers/soc/ti/knav_dma.c 	struct knav_dma_device *dma;
dma               457 drivers/soc/ti/knav_dma.c 	list_for_each_entry(dma, &kdev->list, list) {
dma               458 drivers/soc/ti/knav_dma.c 		if (!strcmp(dma->name, instance)) {
dma               470 drivers/soc/ti/knav_dma.c 	list_for_each_entry(chan, &dma->chan_list, list) {
dma               497 drivers/soc/ti/knav_dma.c 	if (atomic_inc_return(&chan->dma->ref_count) <= 1)
dma               498 drivers/soc/ti/knav_dma.c 		knav_dma_hw_init(chan->dma);
dma               528 drivers/soc/ti/knav_dma.c 	if (atomic_dec_return(&chan->dma->ref_count) <= 0)
dma               529 drivers/soc/ti/knav_dma.c 		knav_dma_hw_destroy(chan->dma);
dma               532 drivers/soc/ti/knav_dma.c 			chan->channel, chan->flow, chan->dma->name);
dma               536 drivers/soc/ti/knav_dma.c static void __iomem *pktdma_get_regs(struct knav_dma_device *dma,
dma               564 drivers/soc/ti/knav_dma.c 	struct knav_dma_device *dma = chan->dma;
dma               567 drivers/soc/ti/knav_dma.c 	chan->reg_rx_flow = dma->reg_rx_flow + flow;
dma               576 drivers/soc/ti/knav_dma.c 	struct knav_dma_device *dma = chan->dma;
dma               579 drivers/soc/ti/knav_dma.c 	chan->reg_chan = dma->reg_tx_chan + channel;
dma               580 drivers/soc/ti/knav_dma.c 	chan->reg_tx_sched = dma->reg_tx_sched + channel;
dma               587 drivers/soc/ti/knav_dma.c static int pktdma_init_chan(struct knav_dma_device *dma,
dma               600 drivers/soc/ti/knav_dma.c 	chan->dma	= dma;
dma               615 drivers/soc/ti/knav_dma.c 	list_add_tail(&chan->list, &dma->chan_list);
dma               624 drivers/soc/ti/knav_dma.c 	struct knav_dma_device *dma;
dma               630 drivers/soc/ti/knav_dma.c 	dma = devm_kzalloc(kdev->dev, sizeof(*dma), GFP_KERNEL);
dma               631 drivers/soc/ti/knav_dma.c 	if (!dma) {
dma               635 drivers/soc/ti/knav_dma.c 	INIT_LIST_HEAD(&dma->list);
dma               636 drivers/soc/ti/knav_dma.c 	INIT_LIST_HEAD(&dma->chan_list);
dma               643 drivers/soc/ti/knav_dma.c 	dma->logical_queue_managers = len / sizeof(u32);
dma               644 drivers/soc/ti/knav_dma.c 	if (dma->logical_queue_managers > DMA_MAX_QMS) {
dma               646 drivers/soc/ti/knav_dma.c 			 dma->logical_queue_managers);
dma               647 drivers/soc/ti/knav_dma.c 		dma->logical_queue_managers = DMA_MAX_QMS;
dma               651 drivers/soc/ti/knav_dma.c 					dma->qm_base_address,
dma               652 drivers/soc/ti/knav_dma.c 					dma->logical_queue_managers);
dma               658 drivers/soc/ti/knav_dma.c 	dma->reg_global	 = pktdma_get_regs(dma, node, 0, &size);
dma               659 drivers/soc/ti/knav_dma.c 	if (!dma->reg_global)
dma               666 drivers/soc/ti/knav_dma.c 	dma->reg_tx_chan = pktdma_get_regs(dma, node, 1, &size);
dma               667 drivers/soc/ti/knav_dma.c 	if (!dma->reg_tx_chan)
dma               671 drivers/soc/ti/knav_dma.c 	dma->reg_rx_chan = pktdma_get_regs(dma, node, 2, &size);
dma               672 drivers/soc/ti/knav_dma.c 	if (!dma->reg_rx_chan)
dma               676 drivers/soc/ti/knav_dma.c 	dma->reg_tx_sched = pktdma_get_regs(dma, node, 3, &size);
dma               677 drivers/soc/ti/knav_dma.c 	if (!dma->reg_tx_sched)
dma               681 drivers/soc/ti/knav_dma.c 	dma->reg_rx_flow = pktdma_get_regs(dma, node, 4, &size);
dma               682 drivers/soc/ti/knav_dma.c 	if (!dma->reg_rx_flow)
dma               686 drivers/soc/ti/knav_dma.c 	dma->rx_priority = DMA_PRIO_DEFAULT;
dma               687 drivers/soc/ti/knav_dma.c 	dma->tx_priority = DMA_PRIO_DEFAULT;
dma               689 drivers/soc/ti/knav_dma.c 	dma->enable_all	= (of_get_property(node, "ti,enable-all", NULL) != NULL);
dma               690 drivers/soc/ti/knav_dma.c 	dma->loopback	= (of_get_property(node, "ti,loop-back",  NULL) != NULL);
dma               699 drivers/soc/ti/knav_dma.c 	dma->rx_timeout = timeout;
dma               700 drivers/soc/ti/knav_dma.c 	dma->max_rx_chan = max_rx_chan;
dma               701 drivers/soc/ti/knav_dma.c 	dma->max_rx_flow = max_rx_flow;
dma               702 drivers/soc/ti/knav_dma.c 	dma->max_tx_chan = min(max_tx_chan, max_tx_sched);
dma               703 drivers/soc/ti/knav_dma.c 	atomic_set(&dma->ref_count, 0);
dma               704 drivers/soc/ti/knav_dma.c 	strcpy(dma->name, node->name);
dma               705 drivers/soc/ti/knav_dma.c 	spin_lock_init(&dma->lock);
dma               707 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->max_tx_chan; i++) {
dma               708 drivers/soc/ti/knav_dma.c 		if (pktdma_init_chan(dma, DMA_MEM_TO_DEV, i) >= 0)
dma               712 drivers/soc/ti/knav_dma.c 	for (i = 0; i < dma->max_rx_flow; i++) {
dma               713 drivers/soc/ti/knav_dma.c 		if (pktdma_init_chan(dma, DMA_DEV_TO_MEM, i) >= 0)
dma               717 drivers/soc/ti/knav_dma.c 	list_add_tail(&dma->list, &kdev->list);
dma               723 drivers/soc/ti/knav_dma.c 	if (dma->enable_all) {
dma               724 drivers/soc/ti/knav_dma.c 		atomic_inc(&dma->ref_count);
dma               725 drivers/soc/ti/knav_dma.c 		knav_dma_hw_init(dma);
dma               726 drivers/soc/ti/knav_dma.c 		dma_hw_enable_all(dma);
dma               730 drivers/soc/ti/knav_dma.c 		dma->name, num_chan, dma->max_rx_flow,
dma               731 drivers/soc/ti/knav_dma.c 		dma->max_tx_chan, dma->max_rx_chan,
dma               732 drivers/soc/ti/knav_dma.c 		dma->loopback ? ", loopback" : "");
dma               789 drivers/soc/ti/knav_dma.c 	struct knav_dma_device *dma;
dma               791 drivers/soc/ti/knav_dma.c 	list_for_each_entry(dma, &kdev->list, list) {
dma               792 drivers/soc/ti/knav_dma.c 		if (atomic_dec_return(&dma->ref_count) == 0)
dma               793 drivers/soc/ti/knav_dma.c 			knav_dma_hw_destroy(dma);
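knav_dma.c above brings the hardware up when the first channel is opened and tears it down when the last one is released, keyed off an atomic reference count. A stripped-down sketch of that idiom (my_hw_init/my_hw_destroy stand in for knav_dma_hw_init/knav_dma_hw_destroy):

#include <linux/atomic.h>

struct my_dma_device {
	atomic_t ref_count;
};

static void my_hw_init(struct my_dma_device *dma)    { /* program registers */ }
static void my_hw_destroy(struct my_dma_device *dma) { /* disable channels  */ }

static void my_chan_open(struct my_dma_device *dma)
{
	/* transition 0 -> 1 means we are the first user */
	if (atomic_inc_return(&dma->ref_count) <= 1)
		my_hw_init(dma);
}

static void my_chan_close(struct my_dma_device *dma)
{
	/* transition 1 -> 0 means we were the last user */
	if (atomic_dec_return(&dma->ref_count) <= 0)
		my_hw_destroy(dma);
}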
dma                96 drivers/soc/ti/knav_qmss.h 	dma_addr_t	 dma;
dma               640 drivers/soc/ti/knav_qmss_queue.c int knav_queue_push(void *qhandle, dma_addr_t dma,
dma               646 drivers/soc/ti/knav_qmss_queue.c 	val = (u32)dma | ((size / 16) - 1);
dma               665 drivers/soc/ti/knav_qmss_queue.c 	dma_addr_t dma;
dma               683 drivers/soc/ti/knav_qmss_queue.c 	dma = val & DESC_PTR_MASK;
dma               688 drivers/soc/ti/knav_qmss_queue.c 	return dma;
dma               715 drivers/soc/ti/knav_qmss_queue.c 	dma_addr_t dma;
dma               724 drivers/soc/ti/knav_qmss_queue.c 		dma = knav_queue_pop(pool->queue, &size);
dma               725 drivers/soc/ti/knav_qmss_queue.c 		if (!dma)
dma               727 drivers/soc/ti/knav_qmss_queue.c 		desc = knav_pool_desc_dma_to_virt(pool, dma);
dma               747 drivers/soc/ti/knav_qmss_queue.c void *knav_pool_desc_dma_to_virt(void *ph, dma_addr_t dma)
dma               750 drivers/soc/ti/knav_qmss_queue.c 	return pool->region->virt_start + (dma - pool->region->dma_start);
dma               901 drivers/soc/ti/knav_qmss_queue.c 	dma_addr_t dma;
dma               905 drivers/soc/ti/knav_qmss_queue.c 	dma = knav_queue_pop(pool->queue, &size);
dma               906 drivers/soc/ti/knav_qmss_queue.c 	if (unlikely(!dma))
dma               908 drivers/soc/ti/knav_qmss_queue.c 	data = knav_pool_desc_dma_to_virt(pool, dma);
dma               920 drivers/soc/ti/knav_qmss_queue.c 	dma_addr_t dma;
dma               921 drivers/soc/ti/knav_qmss_queue.c 	dma = knav_pool_desc_virt_to_dma(pool, desc);
dma               922 drivers/soc/ti/knav_qmss_queue.c 	knav_queue_push(pool->queue, dma, pool->region->desc_size, 0);
dma               937 drivers/soc/ti/knav_qmss_queue.c 					dma_addr_t *dma, unsigned *dma_sz)
dma               940 drivers/soc/ti/knav_qmss_queue.c 	*dma = knav_pool_desc_virt_to_dma(pool, desc);
dma               944 drivers/soc/ti/knav_qmss_queue.c 	dma_sync_single_for_device(pool->dev, *dma, size, DMA_TO_DEVICE);
dma               962 drivers/soc/ti/knav_qmss_queue.c void *knav_pool_desc_unmap(void *ph, dma_addr_t dma, unsigned dma_sz)
dma               969 drivers/soc/ti/knav_qmss_queue.c 	desc = knav_pool_desc_dma_to_virt(pool, dma);
dma               970 drivers/soc/ti/knav_qmss_queue.c 	dma_sync_single_for_cpu(pool->dev, dma, desc_sz, DMA_FROM_DEVICE);
dma              1168 drivers/soc/ti/knav_qmss_queue.c 			block->dma = (dma_addr_t)temp[0];
dma              1175 drivers/soc/ti/knav_qmss_queue.c 						  8 * block->size, &block->dma,
dma              1196 drivers/soc/ti/knav_qmss_queue.c 			&block->dma, block->virt, block->size);
dma              1197 drivers/soc/ti/knav_qmss_queue.c 		writel_relaxed((u32)block->dma, &qmgr->reg_config->link_ram_base0);
dma              1209 drivers/soc/ti/knav_qmss_queue.c 			&block->dma, block->virt, block->size);
dma              1210 drivers/soc/ti/knav_qmss_queue.c 		writel_relaxed(block->dma, &qmgr->reg_config->link_ram_base1);
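knav_qmss_queue.c above converts between descriptor bus addresses and CPU pointers with a simple offset into one region, and packs the descriptor size into the low bits of the 16-byte-aligned address pushed to the queue. A sketch of both helpers, with a placeholder region structure:

#include <linux/types.h>

struct my_region {
	void		*virt_start;
	dma_addr_t	dma_start;
};

static void *my_desc_dma_to_virt(struct my_region *r, dma_addr_t dma)
{
	/* descriptors are contiguous, so the translation is a fixed offset */
	return r->virt_start + (dma - r->dma_start);
}

static u32 my_queue_word(dma_addr_t dma, unsigned int size)
{
	/* low bits encode (size / 16) - 1; dma is 16-byte aligned */
	return (u32)dma | ((size / 16) - 1);
}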
dma              1156 drivers/soundwire/cadence_master.c 	struct sdw_cdns_dma_data *dma;
dma              1158 drivers/soundwire/cadence_master.c 	dma = kzalloc(sizeof(*dma), GFP_KERNEL);
dma              1159 drivers/soundwire/cadence_master.c 	if (!dma)
dma              1163 drivers/soundwire/cadence_master.c 		dma->stream_type = SDW_STREAM_PCM;
dma              1165 drivers/soundwire/cadence_master.c 		dma->stream_type = SDW_STREAM_PDM;
dma              1167 drivers/soundwire/cadence_master.c 	dma->bus = &cdns->bus;
dma              1168 drivers/soundwire/cadence_master.c 	dma->link_id = cdns->instance;
dma              1170 drivers/soundwire/cadence_master.c 	dma->stream = stream;
dma              1173 drivers/soundwire/cadence_master.c 		dai->playback_dma_data = dma;
dma              1175 drivers/soundwire/cadence_master.c 		dai->capture_dma_data = dma;
dma               658 drivers/soundwire/intel.c static void intel_port_cleanup(struct sdw_cdns_dma_data *dma)
dma               662 drivers/soundwire/intel.c 	for (i = 0; i < dma->nr_ports; i++) {
dma               663 drivers/soundwire/intel.c 		if (dma->port[i]) {
dma               664 drivers/soundwire/intel.c 			dma->port[i]->pdi->assigned = false;
dma               665 drivers/soundwire/intel.c 			dma->port[i]->pdi = NULL;
dma               666 drivers/soundwire/intel.c 			dma->port[i]->assigned = false;
dma               667 drivers/soundwire/intel.c 			dma->port[i] = NULL;
dma               678 drivers/soundwire/intel.c 	struct sdw_cdns_dma_data *dma;
dma               684 drivers/soundwire/intel.c 	dma = snd_soc_dai_get_dma_data(dai, substream);
dma               685 drivers/soundwire/intel.c 	if (!dma)
dma               694 drivers/soundwire/intel.c 	if (dma->stream_type == SDW_STREAM_PDM) {
dma               696 drivers/soundwire/intel.c 		dma->nr_ports = sdw_cdns_get_stream(cdns, &cdns->pdm, ch, dir);
dma               699 drivers/soundwire/intel.c 		dma->nr_ports = sdw_cdns_get_stream(cdns, &cdns->pcm, ch, dir);
dma               702 drivers/soundwire/intel.c 	if (!dma->nr_ports) {
dma               707 drivers/soundwire/intel.c 	dma->port = kcalloc(dma->nr_ports, sizeof(*dma->port), GFP_KERNEL);
dma               708 drivers/soundwire/intel.c 	if (!dma->port)
dma               711 drivers/soundwire/intel.c 	for (i = 0; i < dma->nr_ports; i++) {
dma               712 drivers/soundwire/intel.c 		dma->port[i] = intel_alloc_port(sdw, ch, dir, pcm);
dma               713 drivers/soundwire/intel.c 		if (!dma->port[i]) {
dma               720 drivers/soundwire/intel.c 	for (i = 0; i < dma->nr_ports; i++) {
dma               722 drivers/soundwire/intel.c 					  dma->port[i]->pdi->intel_alh_id);
dma               730 drivers/soundwire/intel.c 	sconfig.type = dma->stream_type;
dma               732 drivers/soundwire/intel.c 	if (dma->stream_type == SDW_STREAM_PDM) {
dma               740 drivers/soundwire/intel.c 	pconfig = kcalloc(dma->nr_ports, sizeof(*pconfig), GFP_KERNEL);
dma               746 drivers/soundwire/intel.c 	for (i = 0; i < dma->nr_ports; i++) {
dma               747 drivers/soundwire/intel.c 		pconfig[i].num = dma->port[i]->num;
dma               752 drivers/soundwire/intel.c 				    pconfig, dma->nr_ports, dma->stream);
dma               764 drivers/soundwire/intel.c 	intel_port_cleanup(dma);
dma               765 drivers/soundwire/intel.c 	kfree(dma->port);
dma               773 drivers/soundwire/intel.c 	struct sdw_cdns_dma_data *dma;
dma               776 drivers/soundwire/intel.c 	dma = snd_soc_dai_get_dma_data(dai, substream);
dma               777 drivers/soundwire/intel.c 	if (!dma)
dma               780 drivers/soundwire/intel.c 	ret = sdw_stream_remove_master(&cdns->bus, dma->stream);
dma               783 drivers/soundwire/intel.c 			dma->stream->name, ret);
dma               785 drivers/soundwire/intel.c 	intel_port_cleanup(dma);
dma               786 drivers/soundwire/intel.c 	kfree(dma->port);
dma               793 drivers/soundwire/intel.c 	struct sdw_cdns_dma_data *dma;
dma               795 drivers/soundwire/intel.c 	dma = snd_soc_dai_get_dma_data(dai, substream);
dma               796 drivers/soundwire/intel.c 	if (!dma)
dma               800 drivers/soundwire/intel.c 	kfree(dma);
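cadence_master.c and intel.c above attach a small kzalloc'ed context to the DAI for each stream direction and retrieve it later with snd_soc_dai_get_dma_data(). A sketch of that pattern, with a placeholder context instead of sdw_cdns_dma_data:

#include <linux/slab.h>
#include <sound/soc.h>

struct my_dma_data {
	int link_id;	/* whatever per-stream state the driver needs */
};

static int my_startup(struct snd_pcm_substream *substream,
		      struct snd_soc_dai *dai)
{
	struct my_dma_data *dma;

	dma = kzalloc(sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return -ENOMEM;

	snd_soc_dai_set_dma_data(dai, substream, dma);
	return 0;
}

static void my_shutdown(struct snd_pcm_substream *substream,
			struct snd_soc_dai *dai)
{
	struct my_dma_data *dma = snd_soc_dai_get_dma_data(dai, substream);

	snd_soc_dai_set_dma_data(dai, substream, NULL);
	kfree(dma);
}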
dma               197 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma			*dma;
dma               249 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma *dma = dspi->dma;
dma               251 drivers/spi/spi-fsl-dspi.c 	complete(&dma->cmd_tx_complete);
dma               257 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma *dma = dspi->dma;
dma               261 drivers/spi/spi-fsl-dspi.c 		for (i = 0; i < dma->curr_xfer_len; i++)
dma               262 drivers/spi/spi-fsl-dspi.c 			dspi_push_rx(dspi, dspi->dma->rx_dma_buf[i]);
dma               265 drivers/spi/spi-fsl-dspi.c 	complete(&dma->cmd_rx_complete);
dma               271 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma *dma = dspi->dma;
dma               275 drivers/spi/spi-fsl-dspi.c 	for (i = 0; i < dma->curr_xfer_len; i++)
dma               276 drivers/spi/spi-fsl-dspi.c 		dspi->dma->tx_dma_buf[i] = dspi_pop_tx_pushr(dspi);
dma               278 drivers/spi/spi-fsl-dspi.c 	dma->tx_desc = dmaengine_prep_slave_single(dma->chan_tx,
dma               279 drivers/spi/spi-fsl-dspi.c 					dma->tx_dma_phys,
dma               280 drivers/spi/spi-fsl-dspi.c 					dma->curr_xfer_len *
dma               284 drivers/spi/spi-fsl-dspi.c 	if (!dma->tx_desc) {
dma               289 drivers/spi/spi-fsl-dspi.c 	dma->tx_desc->callback = dspi_tx_dma_callback;
dma               290 drivers/spi/spi-fsl-dspi.c 	dma->tx_desc->callback_param = dspi;
dma               291 drivers/spi/spi-fsl-dspi.c 	if (dma_submit_error(dmaengine_submit(dma->tx_desc))) {
dma               296 drivers/spi/spi-fsl-dspi.c 	dma->rx_desc = dmaengine_prep_slave_single(dma->chan_rx,
dma               297 drivers/spi/spi-fsl-dspi.c 					dma->rx_dma_phys,
dma               298 drivers/spi/spi-fsl-dspi.c 					dma->curr_xfer_len *
dma               302 drivers/spi/spi-fsl-dspi.c 	if (!dma->rx_desc) {
dma               307 drivers/spi/spi-fsl-dspi.c 	dma->rx_desc->callback = dspi_rx_dma_callback;
dma               308 drivers/spi/spi-fsl-dspi.c 	dma->rx_desc->callback_param = dspi;
dma               309 drivers/spi/spi-fsl-dspi.c 	if (dma_submit_error(dmaengine_submit(dma->rx_desc))) {
dma               314 drivers/spi/spi-fsl-dspi.c 	reinit_completion(&dspi->dma->cmd_rx_complete);
dma               315 drivers/spi/spi-fsl-dspi.c 	reinit_completion(&dspi->dma->cmd_tx_complete);
dma               317 drivers/spi/spi-fsl-dspi.c 	dma_async_issue_pending(dma->chan_rx);
dma               318 drivers/spi/spi-fsl-dspi.c 	dma_async_issue_pending(dma->chan_tx);
dma               321 drivers/spi/spi-fsl-dspi.c 		wait_for_completion_interruptible(&dspi->dma->cmd_rx_complete);
dma               325 drivers/spi/spi-fsl-dspi.c 	time_left = wait_for_completion_timeout(&dspi->dma->cmd_tx_complete,
dma               329 drivers/spi/spi-fsl-dspi.c 		dmaengine_terminate_all(dma->chan_tx);
dma               330 drivers/spi/spi-fsl-dspi.c 		dmaengine_terminate_all(dma->chan_rx);
dma               334 drivers/spi/spi-fsl-dspi.c 	time_left = wait_for_completion_timeout(&dspi->dma->cmd_rx_complete,
dma               338 drivers/spi/spi-fsl-dspi.c 		dmaengine_terminate_all(dma->chan_tx);
dma               339 drivers/spi/spi-fsl-dspi.c 		dmaengine_terminate_all(dma->chan_rx);
dma               350 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma *dma = dspi->dma;
dma               359 drivers/spi/spi-fsl-dspi.c 		dma->curr_xfer_len = curr_remaining_bytes
dma               361 drivers/spi/spi-fsl-dspi.c 		if (dma->curr_xfer_len > bytes_per_buffer)
dma               362 drivers/spi/spi-fsl-dspi.c 			dma->curr_xfer_len = bytes_per_buffer;
dma               371 drivers/spi/spi-fsl-dspi.c 				dma->curr_xfer_len * dspi->bytes_per_word;
dma               387 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma *dma;
dma               390 drivers/spi/spi-fsl-dspi.c 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
dma               391 drivers/spi/spi-fsl-dspi.c 	if (!dma)
dma               394 drivers/spi/spi-fsl-dspi.c 	dma->chan_rx = dma_request_slave_channel(dev, "rx");
dma               395 drivers/spi/spi-fsl-dspi.c 	if (!dma->chan_rx) {
dma               401 drivers/spi/spi-fsl-dspi.c 	dma->chan_tx = dma_request_slave_channel(dev, "tx");
dma               402 drivers/spi/spi-fsl-dspi.c 	if (!dma->chan_tx) {
dma               408 drivers/spi/spi-fsl-dspi.c 	dma->tx_dma_buf = dma_alloc_coherent(dev, DSPI_DMA_BUFSIZE,
dma               409 drivers/spi/spi-fsl-dspi.c 					     &dma->tx_dma_phys, GFP_KERNEL);
dma               410 drivers/spi/spi-fsl-dspi.c 	if (!dma->tx_dma_buf) {
dma               415 drivers/spi/spi-fsl-dspi.c 	dma->rx_dma_buf = dma_alloc_coherent(dev, DSPI_DMA_BUFSIZE,
dma               416 drivers/spi/spi-fsl-dspi.c 					     &dma->rx_dma_phys, GFP_KERNEL);
dma               417 drivers/spi/spi-fsl-dspi.c 	if (!dma->rx_dma_buf) {
dma               430 drivers/spi/spi-fsl-dspi.c 	ret = dmaengine_slave_config(dma->chan_rx, &cfg);
dma               438 drivers/spi/spi-fsl-dspi.c 	ret = dmaengine_slave_config(dma->chan_tx, &cfg);
dma               445 drivers/spi/spi-fsl-dspi.c 	dspi->dma = dma;
dma               446 drivers/spi/spi-fsl-dspi.c 	init_completion(&dma->cmd_tx_complete);
dma               447 drivers/spi/spi-fsl-dspi.c 	init_completion(&dma->cmd_rx_complete);
dma               453 drivers/spi/spi-fsl-dspi.c 			dma->rx_dma_buf, dma->rx_dma_phys);
dma               456 drivers/spi/spi-fsl-dspi.c 			dma->tx_dma_buf, dma->tx_dma_phys);
dma               458 drivers/spi/spi-fsl-dspi.c 	dma_release_channel(dma->chan_tx);
dma               460 drivers/spi/spi-fsl-dspi.c 	dma_release_channel(dma->chan_rx);
dma               462 drivers/spi/spi-fsl-dspi.c 	devm_kfree(dev, dma);
dma               463 drivers/spi/spi-fsl-dspi.c 	dspi->dma = NULL;
dma               470 drivers/spi/spi-fsl-dspi.c 	struct fsl_dspi_dma *dma = dspi->dma;
dma               473 drivers/spi/spi-fsl-dspi.c 	if (!dma)
dma               476 drivers/spi/spi-fsl-dspi.c 	if (dma->chan_tx) {
dma               477 drivers/spi/spi-fsl-dspi.c 		dma_unmap_single(dev, dma->tx_dma_phys,
dma               479 drivers/spi/spi-fsl-dspi.c 		dma_release_channel(dma->chan_tx);
dma               482 drivers/spi/spi-fsl-dspi.c 	if (dma->chan_rx) {
dma               483 drivers/spi/spi-fsl-dspi.c 		dma_unmap_single(dev, dma->rx_dma_phys,
dma               485 drivers/spi/spi-fsl-dspi.c 		dma_release_channel(dma->chan_rx);
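spi-fsl-dspi.c above runs a full dmaengine round trip: prepare a slave transfer over a coherent buffer, hook a completion callback, submit, kick the channel, and wait with a timeout, terminating on failure. A condensed sketch of that sequence (the buffer, direction and 1 s timeout are illustrative):

#include <linux/dmaengine.h>
#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

static void my_dma_done(void *arg)
{
	complete(arg);	/* signalled from the dmaengine completion path */
}

static int my_dma_xfer(struct dma_chan *chan, dma_addr_t buf, size_t len,
		       enum dma_transfer_direction dir)
{
	DECLARE_COMPLETION_ONSTACK(done);
	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_slave_single(chan, buf, len, dir,
					   DMA_PREP_INTERRUPT);
	if (!desc)
		return -EINVAL;

	desc->callback = my_dma_done;
	desc->callback_param = &done;

	if (dma_submit_error(dmaengine_submit(desc)))
		return -EINVAL;

	dma_async_issue_pending(chan);

	if (!wait_for_completion_timeout(&done, msecs_to_jiffies(1000))) {
		dmaengine_terminate_all(chan);
		return -ETIMEDOUT;
	}
	return 0;
}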
dma               242 drivers/spi/spi-s3c64xx.c 	struct s3c64xx_spi_dma_data *dma = data;
dma               245 drivers/spi/spi-s3c64xx.c 	if (dma->direction == DMA_DEV_TO_MEM)
dma               254 drivers/spi/spi-s3c64xx.c 	if (dma->direction == DMA_DEV_TO_MEM) {
dma               267 drivers/spi/spi-s3c64xx.c static void prepare_dma(struct s3c64xx_spi_dma_data *dma,
dma               276 drivers/spi/spi-s3c64xx.c 	if (dma->direction == DMA_DEV_TO_MEM) {
dma               277 drivers/spi/spi-s3c64xx.c 		sdd = container_of((void *)dma,
dma               279 drivers/spi/spi-s3c64xx.c 		config.direction = dma->direction;
dma               283 drivers/spi/spi-s3c64xx.c 		dmaengine_slave_config(dma->ch, &config);
dma               285 drivers/spi/spi-s3c64xx.c 		sdd = container_of((void *)dma,
dma               287 drivers/spi/spi-s3c64xx.c 		config.direction = dma->direction;
dma               291 drivers/spi/spi-s3c64xx.c 		dmaengine_slave_config(dma->ch, &config);
dma               294 drivers/spi/spi-s3c64xx.c 	desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents,
dma               295 drivers/spi/spi-s3c64xx.c 				       dma->direction, DMA_PREP_INTERRUPT);
dma               298 drivers/spi/spi-s3c64xx.c 	desc->callback_param = dma;
dma               301 drivers/spi/spi-s3c64xx.c 	dma_async_issue_pending(dma->ch);
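prepare_dma() in spi-s3c64xx.c above configures the channel with dmaengine_slave_config() before queueing a scatter-gather transfer. A sketch of the RX-side configuration, with a placeholder FIFO address and burst size:

#include <linux/dmaengine.h>

static int my_config_rx(struct dma_chan *ch, dma_addr_t fifo_addr)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_addr,	/* device FIFO, illustrative */
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_maxburst	= 1,
	};

	return dmaengine_slave_config(ch, &cfg);
}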
dma               165 drivers/spi/spi-sprd.c 	struct sprd_spi_dma dma;
dma               519 drivers/spi/spi-sprd.c 	struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_RX];
dma               522 drivers/spi/spi-sprd.c 		.src_addr_width = ss->dma.width,
dma               523 drivers/spi/spi-sprd.c 		.dst_addr_width = ss->dma.width,
dma               524 drivers/spi/spi-sprd.c 		.dst_maxburst = ss->dma.fragmens_len,
dma               532 drivers/spi/spi-sprd.c 	return ss->dma.rx_len;
dma               537 drivers/spi/spi-sprd.c 	struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_TX];
dma               540 drivers/spi/spi-sprd.c 		.src_addr_width = ss->dma.width,
dma               541 drivers/spi/spi-sprd.c 		.dst_addr_width = ss->dma.width,
dma               542 drivers/spi/spi-sprd.c 		.src_maxburst = ss->dma.fragmens_len,
dma               555 drivers/spi/spi-sprd.c 	ss->dma.dma_chan[SPRD_SPI_RX] = dma_request_chan(ss->dev, "rx_chn");
dma               556 drivers/spi/spi-sprd.c 	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_RX])) {
dma               557 drivers/spi/spi-sprd.c 		if (PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]) == -EPROBE_DEFER)
dma               558 drivers/spi/spi-sprd.c 			return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]);
dma               561 drivers/spi/spi-sprd.c 		return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]);
dma               564 drivers/spi/spi-sprd.c 	ss->dma.dma_chan[SPRD_SPI_TX]  = dma_request_chan(ss->dev, "tx_chn");
dma               565 drivers/spi/spi-sprd.c 	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_TX])) {
dma               566 drivers/spi/spi-sprd.c 		if (PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]) == -EPROBE_DEFER)
dma               567 drivers/spi/spi-sprd.c 			return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]);
dma               570 drivers/spi/spi-sprd.c 		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
dma               571 drivers/spi/spi-sprd.c 		return PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]);
dma               579 drivers/spi/spi-sprd.c 	if (ss->dma.dma_chan[SPRD_SPI_RX])
dma               580 drivers/spi/spi-sprd.c 		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
dma               582 drivers/spi/spi-sprd.c 	if (ss->dma.dma_chan[SPRD_SPI_TX])
dma               583 drivers/spi/spi-sprd.c 		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_TX]);
dma               632 drivers/spi/spi-sprd.c 		ss->dma.rx_len = t->len > ss->dma.fragmens_len ?
dma               633 drivers/spi/spi-sprd.c 			(t->len - t->len % ss->dma.fragmens_len) :
dma               649 drivers/spi/spi-sprd.c 		ret = ss->dma.rx_len;
dma               744 drivers/spi/spi-sprd.c 		ss->dma.width = DMA_SLAVE_BUSWIDTH_1_BYTE;
dma               745 drivers/spi/spi-sprd.c 		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP;
dma               751 drivers/spi/spi-sprd.c 		ss->dma.width = DMA_SLAVE_BUSWIDTH_2_BYTES;
dma               752 drivers/spi/spi-sprd.c 		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 1;
dma               758 drivers/spi/spi-sprd.c 		ss->dma.width = DMA_SLAVE_BUSWIDTH_4_BYTES;
dma               759 drivers/spi/spi-sprd.c 		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 2;
dma               828 drivers/spi/spi-sprd.c 		if (ss->dma.rx_len < ss->len) {
dma               829 drivers/spi/spi-sprd.c 			ss->rx_buf += ss->dma.rx_len;
dma               830 drivers/spi/spi-sprd.c 			ss->dma.rx_len +=
dma               831 drivers/spi/spi-sprd.c 				ss->read_bufs(ss, ss->len - ss->dma.rx_len);
dma               893 drivers/spi/spi-sprd.c 	return ss->dma.enable && (t->len > SPRD_SPI_FIFO_SIZE);
dma               912 drivers/spi/spi-sprd.c 	ss->dma.enable = true;
dma              1017 drivers/spi/spi-sprd.c 	if (ss->dma.enable)
dma              1031 drivers/spi/spi-sprd.c 	if (ss->dma.enable)
dma              1049 drivers/spi/spi-sprd.c 	if (!ss->dma.enable)
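spi-sprd.c above requests its RX and TX channels with dma_request_chan(), propagates -EPROBE_DEFER, and releases the already-acquired channel on failure. A compact sketch of that probe-time pattern (channel names are illustrative):

#include <linux/dmaengine.h>
#include <linux/err.h>

static int my_request_dma(struct device *dev, struct dma_chan **rx,
			  struct dma_chan **tx)
{
	*rx = dma_request_chan(dev, "rx");
	if (IS_ERR(*rx))
		return PTR_ERR(*rx);	/* includes -EPROBE_DEFER */

	*tx = dma_request_chan(dev, "tx");
	if (IS_ERR(*tx)) {
		dma_release_channel(*rx);
		return PTR_ERR(*tx);
	}
	return 0;
}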
dma               180 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl dma;
dma               751 drivers/spi/spi-topcliff-pch.c 		rx_dma_buf = data->dma.rx_buf_virt;
dma               757 drivers/spi/spi-topcliff-pch.c 		rx_dma_sbuf = data->dma.rx_buf_virt;
dma               766 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl *dma;
dma               770 drivers/spi/spi-topcliff-pch.c 	dma = &data->dma;
dma               790 drivers/spi/spi-topcliff-pch.c 	dma_sync_sg_for_cpu(&data->master->dev, dma->sg_rx_p, dma->nent,
dma               793 drivers/spi/spi-topcliff-pch.c 	dma_sync_sg_for_cpu(&data->master->dev, dma->sg_tx_p, dma->nent,
dma               795 drivers/spi/spi-topcliff-pch.c 	memset(data->dma.tx_buf_virt, 0, PAGE_SIZE);
dma               797 drivers/spi/spi-topcliff-pch.c 	async_tx_ack(dma->desc_rx);
dma               798 drivers/spi/spi-topcliff-pch.c 	async_tx_ack(dma->desc_tx);
dma               799 drivers/spi/spi-topcliff-pch.c 	kfree(dma->sg_tx_p);
dma               800 drivers/spi/spi-topcliff-pch.c 	kfree(dma->sg_rx_p);
dma               847 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl *dma;
dma               855 drivers/spi/spi-topcliff-pch.c 	dma = &data->dma;
dma               864 drivers/spi/spi-topcliff-pch.c 	param = &dma->param_tx;
dma               876 drivers/spi/spi-topcliff-pch.c 	dma->chan_tx = chan;
dma               879 drivers/spi/spi-topcliff-pch.c 	param = &dma->param_rx;
dma               888 drivers/spi/spi-topcliff-pch.c 		dma_release_channel(dma->chan_tx);
dma               889 drivers/spi/spi-topcliff-pch.c 		dma->chan_tx = NULL;
dma               893 drivers/spi/spi-topcliff-pch.c 	dma->chan_rx = chan;
dma               898 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl *dma;
dma               900 drivers/spi/spi-topcliff-pch.c 	dma = &data->dma;
dma               901 drivers/spi/spi-topcliff-pch.c 	if (dma->chan_tx) {
dma               902 drivers/spi/spi-topcliff-pch.c 		dma_release_channel(dma->chan_tx);
dma               903 drivers/spi/spi-topcliff-pch.c 		dma->chan_tx = NULL;
dma               905 drivers/spi/spi-topcliff-pch.c 	if (dma->chan_rx) {
dma               906 drivers/spi/spi-topcliff-pch.c 		dma_release_channel(dma->chan_rx);
dma               907 drivers/spi/spi-topcliff-pch.c 		dma->chan_rx = NULL;
dma               926 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl *dma;
dma               928 drivers/spi/spi-topcliff-pch.c 	dma = &data->dma;
dma               962 drivers/spi/spi-topcliff-pch.c 			tx_dma_buf = dma->tx_buf_virt;
dma               967 drivers/spi/spi-topcliff-pch.c 			tx_dma_sbuf = dma->tx_buf_virt;
dma              1001 drivers/spi/spi-topcliff-pch.c 	dma->sg_rx_p = kcalloc(num, sizeof(*dma->sg_rx_p), GFP_ATOMIC);
dma              1002 drivers/spi/spi-topcliff-pch.c 	if (!dma->sg_rx_p)
dma              1005 drivers/spi/spi-topcliff-pch.c 	sg_init_table(dma->sg_rx_p, num); /* Initialize SG table */
dma              1007 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_rx_p;
dma              1012 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->rx_buf_virt), rem,
dma              1018 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->rx_buf_virt), size,
dma              1024 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->rx_buf_virt), size,
dma              1028 drivers/spi/spi-topcliff-pch.c 		sg_dma_address(sg) = dma->rx_buf_dma + sg->offset;
dma              1030 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_rx_p;
dma              1031 drivers/spi/spi-topcliff-pch.c 	desc_rx = dmaengine_prep_slave_sg(dma->chan_rx, sg,
dma              1042 drivers/spi/spi-topcliff-pch.c 	dma->nent = num;
dma              1043 drivers/spi/spi-topcliff-pch.c 	dma->desc_rx = desc_rx;
dma              1064 drivers/spi/spi-topcliff-pch.c 	dma->sg_tx_p = kcalloc(num, sizeof(*dma->sg_tx_p), GFP_ATOMIC);
dma              1065 drivers/spi/spi-topcliff-pch.c 	if (!dma->sg_tx_p)
dma              1068 drivers/spi/spi-topcliff-pch.c 	sg_init_table(dma->sg_tx_p, num); /* Initialize SG table */
dma              1070 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_tx_p;
dma              1074 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->tx_buf_virt), size + head,
dma              1080 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->tx_buf_virt), rem,
dma              1086 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->tx_buf_virt), size,
dma              1090 drivers/spi/spi-topcliff-pch.c 		sg_dma_address(sg) = dma->tx_buf_dma + sg->offset;
dma              1092 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_tx_p;
dma              1093 drivers/spi/spi-topcliff-pch.c 	desc_tx = dmaengine_prep_slave_sg(dma->chan_tx,
dma              1104 drivers/spi/spi-topcliff-pch.c 	dma->nent = num;
dma              1105 drivers/spi/spi-topcliff-pch.c 	dma->desc_tx = desc_tx;
dma              1283 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl *dma;
dma              1285 drivers/spi/spi-topcliff-pch.c 	dma = &data->dma;
dma              1286 drivers/spi/spi-topcliff-pch.c 	if (dma->tx_buf_dma)
dma              1288 drivers/spi/spi-topcliff-pch.c 				  dma->tx_buf_virt, dma->tx_buf_dma);
dma              1289 drivers/spi/spi-topcliff-pch.c 	if (dma->rx_buf_dma)
dma              1291 drivers/spi/spi-topcliff-pch.c 				  dma->rx_buf_virt, dma->rx_buf_dma);
dma              1297 drivers/spi/spi-topcliff-pch.c 	struct pch_spi_dma_ctrl *dma;
dma              1300 drivers/spi/spi-topcliff-pch.c 	dma = &data->dma;
dma              1303 drivers/spi/spi-topcliff-pch.c 	dma->tx_buf_virt = dma_alloc_coherent(&board_dat->pdev->dev,
dma              1304 drivers/spi/spi-topcliff-pch.c 				PCH_BUF_SIZE, &dma->tx_buf_dma, GFP_KERNEL);
dma              1305 drivers/spi/spi-topcliff-pch.c 	if (!dma->tx_buf_virt)
dma              1309 drivers/spi/spi-topcliff-pch.c 	dma->rx_buf_virt = dma_alloc_coherent(&board_dat->pdev->dev,
dma              1310 drivers/spi/spi-topcliff-pch.c 				PCH_BUF_SIZE, &dma->rx_buf_dma, GFP_KERNEL);
dma              1311 drivers/spi/spi-topcliff-pch.c 	if (!dma->rx_buf_virt)
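spi-topcliff-pch.c above describes one coherent, page-sized buffer with a hand-built scatterlist, filling each entry's DMA address manually before dmaengine_prep_slave_sg(). A sketch of that construction, assuming the whole buffer fits in a single page as it does in the driver:

#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/slab.h>

static struct scatterlist *my_build_sg(void *buf_virt, dma_addr_t buf_dma,
				       int num, size_t chunk)
{
	struct scatterlist *sgl, *sg;
	int i;

	sgl = kcalloc(num, sizeof(*sgl), GFP_ATOMIC);
	if (!sgl)
		return NULL;

	sg_init_table(sgl, num);
	for_each_sg(sgl, sg, num, i) {
		/* offsets stay within one page: num * chunk <= PAGE_SIZE */
		sg_set_page(sg, virt_to_page(buf_virt), chunk, i * chunk);
		sg_dma_address(sg) = buf_dma + sg->offset;
	}
	return sgl;
}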
dma               100 drivers/staging/comedi/drivers/comedi_isadma.c unsigned int comedi_isadma_poll(struct comedi_isadma *dma)
dma               102 drivers/staging/comedi/drivers/comedi_isadma.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               158 drivers/staging/comedi/drivers/comedi_isadma.c 	struct comedi_isadma *dma = NULL;
dma               166 drivers/staging/comedi/drivers/comedi_isadma.c 	dma = kzalloc(sizeof(*dma), GFP_KERNEL);
dma               167 drivers/staging/comedi/drivers/comedi_isadma.c 	if (!dma)
dma               173 drivers/staging/comedi/drivers/comedi_isadma.c 	dma->desc = desc;
dma               174 drivers/staging/comedi/drivers/comedi_isadma.c 	dma->n_desc = n_desc;
dma               176 drivers/staging/comedi/drivers/comedi_isadma.c 		dma->dev = dev->hw_dev;
dma               186 drivers/staging/comedi/drivers/comedi_isadma.c 		dma->dev = dev->class_dev;
dma               197 drivers/staging/comedi/drivers/comedi_isadma.c 	dma->chan = dma_chans[0];
dma               202 drivers/staging/comedi/drivers/comedi_isadma.c 	dma->chan2 = dma_chans[1];
dma               205 drivers/staging/comedi/drivers/comedi_isadma.c 		desc = &dma->desc[i];
dma               208 drivers/staging/comedi/drivers/comedi_isadma.c 		desc->virt_addr = dma_alloc_coherent(dma->dev, desc->maxsize,
dma               216 drivers/staging/comedi/drivers/comedi_isadma.c 	return dma;
dma               219 drivers/staging/comedi/drivers/comedi_isadma.c 	comedi_isadma_free(dma);
dma               228 drivers/staging/comedi/drivers/comedi_isadma.c void comedi_isadma_free(struct comedi_isadma *dma)
dma               233 drivers/staging/comedi/drivers/comedi_isadma.c 	if (!dma)
dma               236 drivers/staging/comedi/drivers/comedi_isadma.c 	if (dma->desc) {
dma               237 drivers/staging/comedi/drivers/comedi_isadma.c 		for (i = 0; i < dma->n_desc; i++) {
dma               238 drivers/staging/comedi/drivers/comedi_isadma.c 			desc = &dma->desc[i];
dma               240 drivers/staging/comedi/drivers/comedi_isadma.c 				dma_free_coherent(dma->dev, desc->maxsize,
dma               244 drivers/staging/comedi/drivers/comedi_isadma.c 		kfree(dma->desc);
dma               246 drivers/staging/comedi/drivers/comedi_isadma.c 	if (dma->chan2 && dma->chan2 != dma->chan)
dma               247 drivers/staging/comedi/drivers/comedi_isadma.c 		free_dma(dma->chan2);
dma               248 drivers/staging/comedi/drivers/comedi_isadma.c 	if (dma->chan)
dma               249 drivers/staging/comedi/drivers/comedi_isadma.c 		free_dma(dma->chan);
dma               250 drivers/staging/comedi/drivers/comedi_isadma.c 	kfree(dma);
dma                64 drivers/staging/comedi/drivers/comedi_isadma.h unsigned int comedi_isadma_poll(struct comedi_isadma *dma);
dma                71 drivers/staging/comedi/drivers/comedi_isadma.h void comedi_isadma_free(struct comedi_isadma *dma);
dma                90 drivers/staging/comedi/drivers/comedi_isadma.h static inline unsigned int comedi_isadma_poll(struct comedi_isadma *dma)
dma               108 drivers/staging/comedi/drivers/comedi_isadma.h static inline void comedi_isadma_free(struct comedi_isadma *dma)
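comedi_isadma.c/.h above provide the shared ISA DMA helpers; the drivers that follow toggle between two descriptors with the "cur_dma = 1 - cur_dma" idiom, draining one buffer while the hardware fills the other. A placeholder sketch of that ping-pong step (the descriptor layout is not comedi_isadma_desc):

struct my_desc {
	void		*virt_addr;
	unsigned int	size;
};

struct my_dma {
	struct my_desc	desc[2];
	unsigned int	cur_dma;	/* index the hardware is using now */
};

static struct my_desc *my_dma_swap(struct my_dma *dma)
{
	struct my_desc *done = &dma->desc[dma->cur_dma];

	/* hand the just-completed buffer to the caller, rearm the other */
	dma->cur_dma = 1 - dma->cur_dma;
	return done;
}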
dma               433 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma	*dma;
dma               450 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma *dma = devpriv->dma;
dma               451 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               473 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma *dma = devpriv->dma;
dma               474 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               506 drivers/staging/comedi/drivers/das16.c 		dma->cur_dma = 1 - dma->cur_dma;
dma               698 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma *dma = devpriv->dma;
dma               737 drivers/staging/comedi/drivers/das16.c 	dma->cur_dma = 0;
dma               765 drivers/staging/comedi/drivers/das16.c 	struct comedi_isadma *dma = devpriv->dma;
dma               775 drivers/staging/comedi/drivers/das16.c 	comedi_isadma_disable(dma->chan);
dma               936 drivers/staging/comedi/drivers/das16.c 	devpriv->dma = comedi_isadma_alloc(dev, 2, dma_chan, dma_chan,
dma               946 drivers/staging/comedi/drivers/das16.c 		comedi_isadma_free(devpriv->dma);
dma              1100 drivers/staging/comedi/drivers/das16.c 	if (devpriv->dma) {
dma               314 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma *dma;
dma               384 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma *dma = devpriv->dma;
dma               385 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               392 drivers/staging/comedi/drivers/das1800.c 		dma->cur_dma = 1 - dma->cur_dma;
dma               393 drivers/staging/comedi/drivers/das1800.c 		desc = &dma->desc[dma->cur_dma];
dma               405 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma *dma = devpriv->dma;
dma               406 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               419 drivers/staging/comedi/drivers/das1800.c 			dma->cur_dma = 1 - dma->cur_dma;
dma               427 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma *dma = devpriv->dma;
dma               436 drivers/staging/comedi/drivers/das1800.c 	if (dma) {
dma               438 drivers/staging/comedi/drivers/das1800.c 			desc = &dma->desc[i];
dma               782 drivers/staging/comedi/drivers/das1800.c 	struct comedi_isadma *dma = devpriv->dma;
dma               789 drivers/staging/comedi/drivers/das1800.c 	dma->cur_dma = 0;
dma               790 drivers/staging/comedi/drivers/das1800.c 	desc = &dma->desc[0];
dma               800 drivers/staging/comedi/drivers/das1800.c 		desc = &dma->desc[1];
dma              1083 drivers/staging/comedi/drivers/das1800.c 	devpriv->dma = comedi_isadma_alloc(dev, dma_chan[1] ? 2 : 1,
dma              1086 drivers/staging/comedi/drivers/das1800.c 	if (!devpriv->dma)
dma              1095 drivers/staging/comedi/drivers/das1800.c 		comedi_isadma_free(devpriv->dma);
dma               305 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma;
dma               319 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               320 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma_desc *desc = &dma->desc[dma_index];
dma               342 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               343 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma_desc *desc = &dma->desc[dma_index];
dma               356 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               361 drivers/staging/comedi/drivers/dt282x.c 		desc = &dma->desc[i];
dma               427 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               428 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma_desc *desc = &dma->desc[cur_dma];
dma               445 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               446 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               453 drivers/staging/comedi/drivers/dt282x.c 	if (!dt282x_ao_setup_dma(dev, s, dma->cur_dma))
dma               456 drivers/staging/comedi/drivers/dt282x.c 	dma->cur_dma = 1 - dma->cur_dma;
dma               463 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               464 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               496 drivers/staging/comedi/drivers/dt282x.c 	dt282x_prep_ai_dma(dev, dma->cur_dma, 0);
dma               498 drivers/staging/comedi/drivers/dt282x.c 	dma->cur_dma = 1 - dma->cur_dma;
dma               721 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               744 drivers/staging/comedi/drivers/dt282x.c 	dma->cur_dma = 0;
dma               907 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_isadma *dma = devpriv->dma;
dma               925 drivers/staging/comedi/drivers/dt282x.c 	dma->cur_dma = 0;
dma              1053 drivers/staging/comedi/drivers/dt282x.c 	devpriv->dma = comedi_isadma_alloc(dev, 2, dma_chan[0], dma_chan[1],
dma              1055 drivers/staging/comedi/drivers/dt282x.c 	if (!devpriv->dma)
dma              1066 drivers/staging/comedi/drivers/dt282x.c 		comedi_isadma_free(devpriv->dma);
dma               125 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma *dma;
dma               136 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma *dma = devpriv->dma;
dma               137 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma_desc *desc = &dma->desc[0];
dma               225 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma *dma = devpriv->dma;
dma               226 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma_desc *desc = &dma->desc[0];
dma               467 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma *dma = devpriv->dma;
dma               468 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_isadma_desc *desc = &dma->desc[0];
dma               656 drivers/staging/comedi/drivers/ni_at_a2150.c 	devpriv->dma = comedi_isadma_alloc(dev, 1, dma_chan, dma_chan,
dma               659 drivers/staging/comedi/drivers/ni_at_a2150.c 	if (!devpriv->dma) {
dma               673 drivers/staging/comedi/drivers/ni_at_a2150.c 		comedi_isadma_free(devpriv->dma);
dma                24 drivers/staging/comedi/drivers/ni_labpc.h 	struct comedi_isadma *dma;
dma               651 drivers/staging/comedi/drivers/ni_labpc_common.c 	if (devpriv->dma &&
dma                55 drivers/staging/comedi/drivers/ni_labpc_isadma.c 	struct comedi_isadma_desc *desc = &devpriv->dma->desc[0];
dma                75 drivers/staging/comedi/drivers/ni_labpc_isadma.c 	struct comedi_isadma_desc *desc = &devpriv->dma->desc[0];
dma               118 drivers/staging/comedi/drivers/ni_labpc_isadma.c 	struct comedi_isadma_desc *desc = &devpriv->dma->desc[0];
dma               153 drivers/staging/comedi/drivers/ni_labpc_isadma.c 	devpriv->dma = comedi_isadma_alloc(dev, 1, dma_chan, dma_chan,
dma               164 drivers/staging/comedi/drivers/ni_labpc_isadma.c 		comedi_isadma_free(devpriv->dma);
dma               512 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma *dma;
dma               530 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma *dma = devpriv->dma;
dma               531 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               536 drivers/staging/comedi/drivers/pcl812.c 	comedi_isadma_disable(dma->chan);
dma               707 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma *dma = devpriv->dma;
dma               714 drivers/staging/comedi/drivers/pcl812.c 	if (dma) {	/*  check if we can use DMA transfer */
dma               739 drivers/staging/comedi/drivers/pcl812.c 		dma->cur_dma = 0;
dma               819 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma *dma = devpriv->dma;
dma               820 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               830 drivers/staging/comedi/drivers/pcl812.c 	dma->cur_dma = 1 - dma->cur_dma;
dma               861 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_isadma *dma = devpriv->dma;
dma               873 drivers/staging/comedi/drivers/pcl812.c 	poll = comedi_isadma_poll(dma);
dma               876 drivers/staging/comedi/drivers/pcl812.c 		desc = &dma->desc[dma->cur_dma];
dma               900 drivers/staging/comedi/drivers/pcl812.c 		comedi_isadma_disable(devpriv->dma->chan);
dma              1118 drivers/staging/comedi/drivers/pcl812.c 	devpriv->dma = comedi_isadma_alloc(dev, 2, dma_chan, dma_chan,
dma              1127 drivers/staging/comedi/drivers/pcl812.c 		comedi_isadma_free(devpriv->dma);
dma               106 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma *dma;
dma               117 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma *dma = devpriv->dma;
dma               118 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               122 drivers/staging/comedi/drivers/pcl816.c 	comedi_isadma_disable(dma->chan);
dma               246 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma *dma = devpriv->dma;
dma               247 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               268 drivers/staging/comedi/drivers/pcl816.c 	dma->cur_dma = 1 - dma->cur_dma;
dma               408 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma *dma = devpriv->dma;
dma               427 drivers/staging/comedi/drivers/pcl816.c 	dma->cur_dma = 0;
dma               444 drivers/staging/comedi/drivers/pcl816.c 	outb((dma->chan << 4) | dev->irq,
dma               453 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_isadma *dma = devpriv->dma;
dma               461 drivers/staging/comedi/drivers/pcl816.c 	poll = comedi_isadma_poll(dma);
dma               464 drivers/staging/comedi/drivers/pcl816.c 		desc = &dma->desc[dma->cur_dma];
dma               586 drivers/staging/comedi/drivers/pcl816.c 	devpriv->dma = comedi_isadma_alloc(dev, 2, dma_chan, dma_chan,
dma               588 drivers/staging/comedi/drivers/pcl816.c 	if (!devpriv->dma)
dma               599 drivers/staging/comedi/drivers/pcl816.c 		comedi_isadma_free(devpriv->dma);
dma               289 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma *dma;
dma               306 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma *dma = devpriv->dma;
dma               307 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               311 drivers/staging/comedi/drivers/pcl818.c 	comedi_isadma_disable(dma->chan);
dma               436 drivers/staging/comedi/drivers/pcl818.c 			(devpriv->dma) ? "DMA" :
dma               478 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma *dma = devpriv->dma;
dma               479 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma];
dma               487 drivers/staging/comedi/drivers/pcl818.c 	dma->cur_dma = 1 - dma->cur_dma;
dma               558 drivers/staging/comedi/drivers/pcl818.c 	if (devpriv->dma)
dma               712 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma *dma = devpriv->dma;
dma               736 drivers/staging/comedi/drivers/pcl818.c 	if (dma) {
dma               738 drivers/staging/comedi/drivers/pcl818.c 		dma->cur_dma = 0;
dma               763 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_isadma *dma = devpriv->dma;
dma               769 drivers/staging/comedi/drivers/pcl818.c 	if (dma) {
dma               782 drivers/staging/comedi/drivers/pcl818.c 		comedi_isadma_disable(dma->chan);
dma               969 drivers/staging/comedi/drivers/pcl818.c 	devpriv->dma = comedi_isadma_alloc(dev, 2, dma_chan, dma_chan,
dma               978 drivers/staging/comedi/drivers/pcl818.c 		comedi_isadma_free(devpriv->dma);
dma               167 drivers/staging/emxx_udc/emxx_udc.c 	udc->ep0_req.req.dma		= 0;
dma               458 drivers/staging/emxx_udc/emxx_udc.c 	if (req->req.dma == DMA_ADDR_INVALID) {
dma               460 drivers/staging/emxx_udc/emxx_udc.c 			req->req.dma = ep->phys_buf;
dma               462 drivers/staging/emxx_udc/emxx_udc.c 			req->req.dma = dma_map_single(udc->gadget.dev.parent,
dma               473 drivers/staging/emxx_udc/emxx_udc.c 						   req->req.dma,
dma               508 drivers/staging/emxx_udc/emxx_udc.c 					 req->req.dma, req->req.length,
dma               513 drivers/staging/emxx_udc/emxx_udc.c 		req->req.dma = DMA_ADDR_INVALID;
dma               518 drivers/staging/emxx_udc/emxx_udc.c 						req->req.dma, req->req.length,
dma               803 drivers/staging/emxx_udc/emxx_udc.c 	p_buffer = req->req.dma;
dma               915 drivers/staging/emxx_udc/emxx_udc.c 	if ((ep->ep_type != USB_ENDPOINT_XFER_INT) && (req->req.dma != 0) &&
dma              1030 drivers/staging/emxx_udc/emxx_udc.c 	p_buffer = req->req.dma;
dma              1112 drivers/staging/emxx_udc/emxx_udc.c 	if ((ep->ep_type != USB_ENDPOINT_XFER_INT) && (req->req.dma != 0) &&
dma              1848 drivers/staging/emxx_udc/emxx_udc.c 	    (req->req.dma != 0))
dma              2515 drivers/staging/emxx_udc/emxx_udc.c 	req->req.dma = DMA_ADDR_INVALID;
dma              2608 drivers/staging/emxx_udc/emxx_udc.c 	    (req->req.dma != 0))
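emxx_udc.c above maps a request buffer with dma_map_single() only when it does not already carry a bus address, using DMA_ADDR_INVALID as the "not mapped" sentinel, and unmaps it on completion. A sketch of that streaming-mapping pattern (the sentinel value here is an assumption, spelled out locally):

#include <linux/dma-mapping.h>
#include <linux/errno.h>

#define MY_ADDR_INVALID	(~(dma_addr_t)0)	/* stands in for DMA_ADDR_INVALID */

static int my_map_req(struct device *dev, void *buf, size_t len,
		      dma_addr_t *dma, enum dma_data_direction dir)
{
	if (*dma != MY_ADDR_INVALID)
		return 0;	/* caller already supplied a bus address */

	*dma = dma_map_single(dev, buf, len, dir);
	return dma_mapping_error(dev, *dma) ? -ENOMEM : 0;
}

static void my_unmap_req(struct device *dev, dma_addr_t *dma, size_t len,
			 enum dma_data_direction dir)
{
	dma_unmap_single(dev, *dma, len, dir);
	*dma = MY_ADDR_INVALID;
}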
dma                94 drivers/staging/isdn/avm/avmcard.h 	avmcard_dmainfo *dma;
dma               356 drivers/staging/isdn/avm/b1dma.c 	skb_queue_tail(&card->dma->send_queue, skb);
dma               370 drivers/staging/isdn/avm/b1dma.c 	avmcard_dmainfo *dma = card->dma;
dma               377 drivers/staging/isdn/avm/b1dma.c 	skb = skb_dequeue(&dma->send_queue);
dma               385 drivers/staging/isdn/avm/b1dma.c 		p = dma->sendbuf.dmabuf;
dma               396 drivers/staging/isdn/avm/b1dma.c 		txlen = (u8 *)p - (u8 *)dma->sendbuf.dmabuf;
dma               410 drivers/staging/isdn/avm/b1dma.c 		skb_copy_from_linear_data_offset(skb, 2, dma->sendbuf.dmabuf,
dma               415 drivers/staging/isdn/avm/b1dma.c 	b1dma_writel(card, dma->sendbuf.dmaaddr, AMCC_TXPTR);
dma               450 drivers/staging/isdn/avm/b1dma.c 	avmcard_dmainfo *dma = card->dma;
dma               453 drivers/staging/isdn/avm/b1dma.c 	void *p = dma->recvbuf.dmabuf + 4;
dma               458 drivers/staging/isdn/avm/b1dma.c 	printk(KERN_DEBUG "rx: 0x%x %lu\n", b1cmd, (unsigned long)dma->recvlen);
dma               603 drivers/staging/isdn/avm/b1dma.c 		struct avmcard_dmainfo *dma = card->dma;
dma               605 drivers/staging/isdn/avm/b1dma.c 		if (card->dma->recvlen == 0) {
dma               608 drivers/staging/isdn/avm/b1dma.c 				dma->recvlen = *((u32 *)dma->recvbuf.dmabuf);
dma               609 drivers/staging/isdn/avm/b1dma.c 				rxlen = (dma->recvlen + 3) & ~3;
dma               610 drivers/staging/isdn/avm/b1dma.c 				b1dma_writel(card, dma->recvbuf.dmaaddr + 4, AMCC_RXPTR);
dma               621 drivers/staging/isdn/avm/b1dma.c 			dma->recvlen = 0;
dma               623 drivers/staging/isdn/avm/b1dma.c 			b1dma_writel(card, dma->recvbuf.dmaaddr, AMCC_RXPTR);
dma               629 drivers/staging/isdn/avm/b1dma.c 		if (skb_queue_empty(&card->dma->send_queue))
dma               742 drivers/staging/isdn/avm/b1dma.c 	card->dma->recvlen = 0;
dma               743 drivers/staging/isdn/avm/b1dma.c 	b1dma_writel(card, card->dma->recvbuf.dmaaddr, AMCC_RXPTR);
dma               925 drivers/staging/isdn/avm/b1dma.c 	txoff = (dma_addr_t)b1dma_readl(card, AMCC_TXPTR)-card->dma->sendbuf.dmaaddr;
dma               928 drivers/staging/isdn/avm/b1dma.c 	rxoff = (dma_addr_t)b1dma_readl(card, AMCC_RXPTR)-card->dma->recvbuf.dmaaddr;
dma               196 drivers/staging/isdn/avm/b1pci.c 	card->dma = avmcard_dma_alloc("b1pci", pdev, 2048 + 128, 2048 + 128);
dma               197 drivers/staging/isdn/avm/b1pci.c 	if (!card->dma) {
dma               277 drivers/staging/isdn/avm/b1pci.c 	avmcard_dma_free(card->dma);
dma               296 drivers/staging/isdn/avm/b1pci.c 	avmcard_dma_free(card->dma);
dma               352 drivers/staging/isdn/avm/b1pci.c 	if (card->dma)
dma               412 drivers/staging/isdn/avm/c4.c 	avmcard_dmainfo *dma = card->dma;
dma               424 drivers/staging/isdn/avm/c4.c 	skb = skb_dequeue(&dma->send_queue);
dma               438 drivers/staging/isdn/avm/c4.c 		p = dma->sendbuf.dmabuf;
dma               449 drivers/staging/isdn/avm/c4.c 		txlen = (u8 *)p - (u8 *)dma->sendbuf.dmabuf;
dma               463 drivers/staging/isdn/avm/c4.c 		skb_copy_from_linear_data_offset(skb, 2, dma->sendbuf.dmabuf,
dma               468 drivers/staging/isdn/avm/c4.c 	c4outmeml(card->mbase + MBOX_DOWN_ADDR, dma->sendbuf.dmaaddr);
dma               497 drivers/staging/isdn/avm/c4.c 	skb_queue_tail(&card->dma->send_queue, skb);
dma               505 drivers/staging/isdn/avm/c4.c 	avmcard_dmainfo *dma = card->dma;
dma               509 drivers/staging/isdn/avm/c4.c 	void *p = dma->recvbuf.dmabuf;
dma               517 drivers/staging/isdn/avm/c4.c 	       b1cmd, (unsigned long)dma->recvlen);
dma               701 drivers/staging/isdn/avm/c4.c 		card->dma->recvlen = c4inmeml(card->mbase + MBOX_UP_LEN);
dma               704 drivers/staging/isdn/avm/c4.c 		card->dma->recvlen = 0;
dma               705 drivers/staging/isdn/avm/c4.c 		c4outmeml(card->mbase + MBOX_UP_LEN, card->dma->recvbuf.size);
dma               752 drivers/staging/isdn/avm/c4.c 	skb_queue_tail(&card->dma->send_queue, skb);
dma               777 drivers/staging/isdn/avm/c4.c 	skb_queue_tail(&card->dma->send_queue, skb);
dma               806 drivers/staging/isdn/avm/c4.c 	skb_queue_tail(&card->dma->send_queue, skb);
dma               878 drivers/staging/isdn/avm/c4.c 	card->dma->recvlen = 0;
dma               879 drivers/staging/isdn/avm/c4.c 	c4outmeml(card->mbase + MBOX_UP_ADDR, card->dma->recvbuf.dmaaddr);
dma               880 drivers/staging/isdn/avm/c4.c 	c4outmeml(card->mbase + MBOX_UP_LEN, card->dma->recvbuf.size);
dma               939 drivers/staging/isdn/avm/c4.c 	avmcard_dma_free(card->dma);
dma               982 drivers/staging/isdn/avm/c4.c 		skb_queue_tail(&card->dma->send_queue, skb);
dma              1018 drivers/staging/isdn/avm/c4.c 		skb_queue_tail(&card->dma->send_queue, skb);
dma              1043 drivers/staging/isdn/avm/c4.c 		skb_queue_tail(&card->dma->send_queue, skb);
dma              1146 drivers/staging/isdn/avm/c4.c 	card->dma = avmcard_dma_alloc("c4", dev, 2048 + 128, 2048 + 128);
dma              1147 drivers/staging/isdn/avm/c4.c 	if (!card->dma) {
dma              1230 drivers/staging/isdn/avm/c4.c 	avmcard_dma_free(card->dma);
dma                62 drivers/staging/isdn/avm/t1pci.c 	card->dma = avmcard_dma_alloc("t1pci", pdev, 2048 + 128, 2048 + 128);
dma                63 drivers/staging/isdn/avm/t1pci.c 	if (!card->dma) {
dma               146 drivers/staging/isdn/avm/t1pci.c 	avmcard_dma_free(card->dma);
dma               166 drivers/staging/isdn/avm/t1pci.c 	avmcard_dma_free(card->dma);
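The AVM B1/C4/T1 transmit paths above all follow the same shape: dequeue an skb, copy its linear data (skipping a 2-byte command header) into a coherent bounce buffer, then write the buffer's bus address to the card. A sketch of just the dequeue-and-copy step; queue locking and the register write are omitted:

    #include <linux/skbuff.h>

    /*
     * Pull one packet off @q and copy its payload into the DMA bounce
     * buffer.  Returns the number of bytes placed in @dmabuf, 0 if empty.
     */
    static unsigned int fill_tx_bounce(struct sk_buff_head *q,
                                       void *dmabuf, unsigned int bufsize)
    {
        struct sk_buff *skb = skb_dequeue(q);
        unsigned int len;

        if (!skb)
            return 0;

        len = skb->len - 2;             /* strip the 2-byte command header */
        if (len > bufsize)
            len = bufsize;
        skb_copy_from_linear_data_offset(skb, 2, dmabuf, len);
        dev_kfree_skb_any(skb);
        return len;
    }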
dma               263 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	vdpu_write_relaxed(vpu, ctx->h264_dec.priv.dma, G1_REG_ADDR_QTABLE);
dma                94 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	vdpu_write_relaxed(vpu, ctx->mpeg2_dec.qtable.dma,
dma               414 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
dma               418 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	reg = G1_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
dma                40 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	vepu_write_relaxed(vpu, ctx->jpeg_enc.bounce_buffer.dma,
dma               614 drivers/staging/media/hantro/hantro_h264.c 	dma_free_coherent(vpu->dev, priv->size, priv->cpu, priv->dma);
dma               625 drivers/staging/media/hantro/hantro_h264.c 	priv->cpu = dma_alloc_coherent(vpu->dev, sizeof(*tbl), &priv->dma,
dma                34 drivers/staging/media/hantro/hantro_hw.h 	dma_addr_t dma;
dma               303 drivers/staging/media/hantro/hantro_jpeg.c 				&ctx->jpeg_enc.bounce_buffer.dma,
dma               317 drivers/staging/media/hantro/hantro_jpeg.c 		       ctx->jpeg_enc.bounce_buffer.dma,
dma                46 drivers/staging/media/hantro/hantro_mpeg2.c 				   &ctx->mpeg2_dec.qtable.dma,
dma                60 drivers/staging/media/hantro/hantro_mpeg2.c 			  ctx->mpeg2_dec.qtable.dma);
dma               165 drivers/staging/media/hantro/hantro_vp8.c 					  &aux_buf->dma, GFP_KERNEL);
dma               176 drivers/staging/media/hantro/hantro_vp8.c 					  &aux_buf->dma, GFP_KERNEL);
dma               187 drivers/staging/media/hantro/hantro_vp8.c 			  ctx->vp8_dec.segment_map.dma);
dma               198 drivers/staging/media/hantro/hantro_vp8.c 			  vp8_dec->segment_map.cpu, vp8_dec->segment_map.dma);
dma               200 drivers/staging/media/hantro/hantro_vp8.c 			  vp8_dec->prob_tbl.cpu, vp8_dec->prob_tbl.dma);
dma                72 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	vepu_write_relaxed(vpu, ctx->jpeg_enc.bounce_buffer.dma,
dma                95 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	vdpu_write_relaxed(vpu, ctx->mpeg2_dec.qtable.dma,
dma               492 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
dma               496 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	reg = VDPU_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
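The Hantro lines above keep every auxiliary table (QP tables, probability tables, segment maps) in a small pair of a CPU pointer and a dma_addr_t, and only the .dma half is ever programmed into the codec registers. A sketch of allocating and freeing such a buffer; the struct layout here is an assumption modelled on hantro_hw.h, not copied from it:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/gfp.h>

    struct aux_buf {
        void       *cpu;    /* kernel virtual address            */
        dma_addr_t  dma;    /* bus address to program into HW    */
        size_t      size;
    };

    static int aux_buf_alloc(struct device *dev, struct aux_buf *buf, size_t size)
    {
        buf->cpu = dma_alloc_coherent(dev, size, &buf->dma, GFP_KERNEL);
        if (!buf->cpu)
            return -ENOMEM;
        buf->size = size;
        return 0;
    }

    static void aux_buf_free(struct device *dev, struct aux_buf *buf)
    {
        dma_free_coherent(dev, buf->size, buf->cpu, buf->dma);
        buf->cpu = NULL;
    }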
dma              1735 drivers/staging/media/ipu3/ipu3-abi.h 	} dma;
dma              2248 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 		rtl92e_writel(dev, TX_DESC_BASE[i], priv->tx_ring[i].dma);
dma              1593 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 	ring->desc, ring->dma);
dma              1832 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 	dma_addr_t dma;
dma              1835 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 	ring = pci_zalloc_consistent(priv->pdev, sizeof(*ring) * entries, &dma);
dma              1842 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 	priv->tx_ring[prio].dma = dma;
dma              1849 drivers/staging/rtl8192e/rtl8192e/rtl_core.c 			(u32)dma + ((i + 1) % entries) *
dma               263 drivers/staging/rtl8192e/rtl8192e/rtl_core.h 	dma_addr_t dma;
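rtl8192e above allocates each TX ring in one consistent block and chains every descriptor to its successor by storing dma + ((i + 1) % entries) * sizeof(desc) in a hardware-visible next pointer. A sketch of that circular chaining, using dma_alloc_coherent() in place of the older pci_zalloc_consistent() call shown above; the descriptor layout is a made-up stand-in:

    #include <linux/dma-mapping.h>
    #include <linux/types.h>

    /* Hypothetical hardware descriptor: only the next pointer matters here. */
    struct ex_desc {
        u32 next_desc_addr;   /* bus address of the following descriptor */
        u32 opts[3];
    };

    static struct ex_desc *ex_alloc_ring(struct device *dev, unsigned int entries,
                                         dma_addr_t *ring_dma)
    {
        struct ex_desc *ring;
        unsigned int i;

        ring = dma_alloc_coherent(dev, sizeof(*ring) * entries, ring_dma,
                                  GFP_KERNEL);
        if (!ring)
            return NULL;

        /* Close the ring: entry i points at entry (i + 1) % entries. */
        for (i = 0; i < entries; i++)
            ring[i].next_desc_addr =
                (u32)(*ring_dma + ((i + 1) % entries) * sizeof(*ring));

        return ring;
    }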
dma                20 drivers/staging/wusbcore/host/whci/qset.c 	dma_addr_t dma;
dma                22 drivers/staging/wusbcore/host/whci/qset.c 	qset = dma_pool_zalloc(whc->qset_pool, mem_flags, &dma);
dma                26 drivers/staging/wusbcore/host/whci/qset.c 	qset->qset_dma = dma;
dma               203 drivers/thunderbolt/dma_port.c 	struct tb_dma_port *dma;
dma               210 drivers/thunderbolt/dma_port.c 	dma = kzalloc(sizeof(*dma), GFP_KERNEL);
dma               211 drivers/thunderbolt/dma_port.c 	if (!dma)
dma               214 drivers/thunderbolt/dma_port.c 	dma->buf = kmalloc_array(MAIL_DATA_DWORDS, sizeof(u32), GFP_KERNEL);
dma               215 drivers/thunderbolt/dma_port.c 	if (!dma->buf) {
dma               216 drivers/thunderbolt/dma_port.c 		kfree(dma);
dma               220 drivers/thunderbolt/dma_port.c 	dma->sw = sw;
dma               221 drivers/thunderbolt/dma_port.c 	dma->port = port;
dma               222 drivers/thunderbolt/dma_port.c 	dma->base = DMA_PORT_CAP;
dma               224 drivers/thunderbolt/dma_port.c 	return dma;
dma               231 drivers/thunderbolt/dma_port.c void dma_port_free(struct tb_dma_port *dma)
dma               233 drivers/thunderbolt/dma_port.c 	if (dma) {
dma               234 drivers/thunderbolt/dma_port.c 		kfree(dma->buf);
dma               235 drivers/thunderbolt/dma_port.c 		kfree(dma);
dma               239 drivers/thunderbolt/dma_port.c static int dma_port_wait_for_completion(struct tb_dma_port *dma,
dma               243 drivers/thunderbolt/dma_port.c 	struct tb_switch *sw = dma->sw;
dma               249 drivers/thunderbolt/dma_port.c 		ret = dma_port_read(sw->tb->ctl, &in, tb_route(sw), dma->port,
dma               250 drivers/thunderbolt/dma_port.c 				    dma->base + MAIL_IN, 1, 50);
dma               278 drivers/thunderbolt/dma_port.c static int dma_port_request(struct tb_dma_port *dma, u32 in,
dma               281 drivers/thunderbolt/dma_port.c 	struct tb_switch *sw = dma->sw;
dma               285 drivers/thunderbolt/dma_port.c 	ret = dma_port_write(sw->tb->ctl, &in, tb_route(sw), dma->port,
dma               286 drivers/thunderbolt/dma_port.c 			     dma->base + MAIL_IN, 1, DMA_PORT_TIMEOUT);
dma               290 drivers/thunderbolt/dma_port.c 	ret = dma_port_wait_for_completion(dma, timeout);
dma               294 drivers/thunderbolt/dma_port.c 	ret = dma_port_read(sw->tb->ctl, &out, tb_route(sw), dma->port,
dma               295 drivers/thunderbolt/dma_port.c 			    dma->base + MAIL_OUT, 1, DMA_PORT_TIMEOUT);
dma               302 drivers/thunderbolt/dma_port.c static int dma_port_flash_read_block(struct tb_dma_port *dma, u32 address,
dma               305 drivers/thunderbolt/dma_port.c 	struct tb_switch *sw = dma->sw;
dma               318 drivers/thunderbolt/dma_port.c 	ret = dma_port_request(dma, in, DMA_PORT_TIMEOUT);
dma               322 drivers/thunderbolt/dma_port.c 	return dma_port_read(sw->tb->ctl, buf, tb_route(sw), dma->port,
dma               323 drivers/thunderbolt/dma_port.c 			     dma->base + MAIL_DATA, dwords, DMA_PORT_TIMEOUT);
dma               326 drivers/thunderbolt/dma_port.c static int dma_port_flash_write_block(struct tb_dma_port *dma, u32 address,
dma               329 drivers/thunderbolt/dma_port.c 	struct tb_switch *sw = dma->sw;
dma               336 drivers/thunderbolt/dma_port.c 	ret = dma_port_write(sw->tb->ctl, buf, tb_route(sw), dma->port,
dma               337 drivers/thunderbolt/dma_port.c 			    dma->base + MAIL_DATA, dwords, DMA_PORT_TIMEOUT);
dma               353 drivers/thunderbolt/dma_port.c 	return dma_port_request(dma, in, DMA_PORT_TIMEOUT);
dma               363 drivers/thunderbolt/dma_port.c int dma_port_flash_read(struct tb_dma_port *dma, unsigned int address,
dma               376 drivers/thunderbolt/dma_port.c 		ret = dma_port_flash_read_block(dma, address, dma->buf,
dma               387 drivers/thunderbolt/dma_port.c 		memcpy(buf, dma->buf + offset, nbytes);
dma               408 drivers/thunderbolt/dma_port.c int dma_port_flash_write(struct tb_dma_port *dma, unsigned int address,
dma               427 drivers/thunderbolt/dma_port.c 		memcpy(dma->buf + offset, buf, nbytes);
dma               429 drivers/thunderbolt/dma_port.c 		ret = dma_port_flash_write_block(dma, address, buf, nbytes);
dma               458 drivers/thunderbolt/dma_port.c int dma_port_flash_update_auth(struct tb_dma_port *dma)
dma               465 drivers/thunderbolt/dma_port.c 	return dma_port_request(dma, in, 150);
dma               481 drivers/thunderbolt/dma_port.c int dma_port_flash_update_auth_status(struct tb_dma_port *dma, u32 *status)
dma               483 drivers/thunderbolt/dma_port.c 	struct tb_switch *sw = dma->sw;
dma               487 drivers/thunderbolt/dma_port.c 	ret = dma_port_read(sw->tb->ctl, &out, tb_route(sw), dma->port,
dma               488 drivers/thunderbolt/dma_port.c 			    dma->base + MAIL_OUT, 1, DMA_PORT_TIMEOUT);
dma               511 drivers/thunderbolt/dma_port.c int dma_port_power_cycle(struct tb_dma_port *dma)
dma               518 drivers/thunderbolt/dma_port.c 	return dma_port_request(dma, in, 150);
dma                22 drivers/thunderbolt/dma_port.h void dma_port_free(struct tb_dma_port *dma);
dma                23 drivers/thunderbolt/dma_port.h int dma_port_flash_read(struct tb_dma_port *dma, unsigned int address,
dma                25 drivers/thunderbolt/dma_port.h int dma_port_flash_update_auth(struct tb_dma_port *dma);
dma                26 drivers/thunderbolt/dma_port.h int dma_port_flash_update_auth_status(struct tb_dma_port *dma, u32 *status);
dma                27 drivers/thunderbolt/dma_port.h int dma_port_flash_write(struct tb_dma_port *dma, unsigned int address,
dma                29 drivers/thunderbolt/dma_port.h int dma_port_power_cycle(struct tb_dma_port *dma);
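dma_port_flash_read() above works through a dword bounce buffer (dma->buf) and chops the caller's request into block-sized mailbox reads, copying only the wanted bytes out of each block. A generic version of that chunking loop; the block size and the read_block() accessor are assumptions standing in for the real mailbox access:

    #include <linux/kernel.h>
    #include <linux/string.h>

    #define EX_BLOCK_BYTES    64    /* assumed: MAIL_DATA_DWORDS * 4 */

    /*
     * Chunked read through a block-sized bounce buffer.  @read_block fills
     * @bounce with EX_BLOCK_BYTES starting at the block-aligned address.
     */
    static int ex_flash_read(unsigned int address, void *buf, size_t size,
                             void *bounce,
                             int (*read_block)(unsigned int address, void *bounce))
    {
        while (size > 0) {
            unsigned int offset = address & (EX_BLOCK_BYTES - 1);
            size_t nbytes = min_t(size_t, size, EX_BLOCK_BYTES - offset);
            int ret = read_block(address - offset, bounce);

            if (ret)
                return ret;

            memcpy(buf, bounce + offset, nbytes);

            buf += nbytes;
            address += nbytes;
            size -= nbytes;
        }
        return 0;
    }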
dma              1009 drivers/tty/serial/8250/8250_core.c 		uart->dma		= up->dma;
dma                17 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma	*dma = p->dma;
dma                22 drivers/tty/serial/8250/8250_dma.c 	dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr,
dma                27 drivers/tty/serial/8250/8250_dma.c 	dma->tx_running = 0;
dma                29 drivers/tty/serial/8250/8250_dma.c 	xmit->tail += dma->tx_size;
dma                31 drivers/tty/serial/8250/8250_dma.c 	p->port.icount.tx += dma->tx_size;
dma                46 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma	*dma = p->dma;
dma                51 drivers/tty/serial/8250/8250_dma.c 	dma->rx_running = 0;
dma                52 drivers/tty/serial/8250/8250_dma.c 	dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);
dma                54 drivers/tty/serial/8250/8250_dma.c 	count = dma->rx_size - state.residue;
dma                56 drivers/tty/serial/8250/8250_dma.c 	tty_insert_flip_string(tty_port, dma->rx_buf, count);
dma                64 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma		*dma = p->dma;
dma                69 drivers/tty/serial/8250/8250_dma.c 	if (dma->tx_running)
dma                78 drivers/tty/serial/8250/8250_dma.c 	dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE);
dma                80 drivers/tty/serial/8250/8250_dma.c 	desc = dmaengine_prep_slave_single(dma->txchan,
dma                81 drivers/tty/serial/8250/8250_dma.c 					   dma->tx_addr + xmit->tail,
dma                82 drivers/tty/serial/8250/8250_dma.c 					   dma->tx_size, DMA_MEM_TO_DEV,
dma                89 drivers/tty/serial/8250/8250_dma.c 	dma->tx_running = 1;
dma                93 drivers/tty/serial/8250/8250_dma.c 	dma->tx_cookie = dmaengine_submit(desc);
dma                95 drivers/tty/serial/8250/8250_dma.c 	dma_sync_single_for_device(dma->txchan->device->dev, dma->tx_addr,
dma                98 drivers/tty/serial/8250/8250_dma.c 	dma_async_issue_pending(dma->txchan);
dma                99 drivers/tty/serial/8250/8250_dma.c 	if (dma->tx_err) {
dma               100 drivers/tty/serial/8250/8250_dma.c 		dma->tx_err = 0;
dma               105 drivers/tty/serial/8250/8250_dma.c 	dma->tx_err = 1;
dma               111 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma		*dma = p->dma;
dma               114 drivers/tty/serial/8250/8250_dma.c 	if (dma->rx_running)
dma               117 drivers/tty/serial/8250/8250_dma.c 	desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr,
dma               118 drivers/tty/serial/8250/8250_dma.c 					   dma->rx_size, DMA_DEV_TO_MEM,
dma               123 drivers/tty/serial/8250/8250_dma.c 	dma->rx_running = 1;
dma               127 drivers/tty/serial/8250/8250_dma.c 	dma->rx_cookie = dmaengine_submit(desc);
dma               129 drivers/tty/serial/8250/8250_dma.c 	dma_async_issue_pending(dma->rxchan);
dma               136 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma *dma = p->dma;
dma               138 drivers/tty/serial/8250/8250_dma.c 	if (dma->rx_running) {
dma               139 drivers/tty/serial/8250/8250_dma.c 		dmaengine_pause(dma->rxchan);
dma               141 drivers/tty/serial/8250/8250_dma.c 		dmaengine_terminate_async(dma->rxchan);
dma               148 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma	*dma = p->dma;
dma               149 drivers/tty/serial/8250/8250_dma.c 	phys_addr_t rx_dma_addr = dma->rx_dma_addr ?
dma               150 drivers/tty/serial/8250/8250_dma.c 				  dma->rx_dma_addr : p->port.mapbase;
dma               151 drivers/tty/serial/8250/8250_dma.c 	phys_addr_t tx_dma_addr = dma->tx_dma_addr ?
dma               152 drivers/tty/serial/8250/8250_dma.c 				  dma->tx_dma_addr : p->port.mapbase;
dma               158 drivers/tty/serial/8250/8250_dma.c 	dma->rxconf.direction		= DMA_DEV_TO_MEM;
dma               159 drivers/tty/serial/8250/8250_dma.c 	dma->rxconf.src_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE;
dma               160 drivers/tty/serial/8250/8250_dma.c 	dma->rxconf.src_addr		= rx_dma_addr + UART_RX;
dma               162 drivers/tty/serial/8250/8250_dma.c 	dma->txconf.direction		= DMA_MEM_TO_DEV;
dma               163 drivers/tty/serial/8250/8250_dma.c 	dma->txconf.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE;
dma               164 drivers/tty/serial/8250/8250_dma.c 	dma->txconf.dst_addr		= tx_dma_addr + UART_TX;
dma               170 drivers/tty/serial/8250/8250_dma.c 	dma->rxchan = dma_request_slave_channel_compat(mask,
dma               171 drivers/tty/serial/8250/8250_dma.c 						       dma->fn, dma->rx_param,
dma               173 drivers/tty/serial/8250/8250_dma.c 	if (!dma->rxchan)
dma               177 drivers/tty/serial/8250/8250_dma.c 	ret = dma_get_slave_caps(dma->rxchan, &caps);
dma               186 drivers/tty/serial/8250/8250_dma.c 	dmaengine_slave_config(dma->rxchan, &dma->rxconf);
dma               189 drivers/tty/serial/8250/8250_dma.c 	dma->txchan = dma_request_slave_channel_compat(mask,
dma               190 drivers/tty/serial/8250/8250_dma.c 						       dma->fn, dma->tx_param,
dma               192 drivers/tty/serial/8250/8250_dma.c 	if (!dma->txchan) {
dma               198 drivers/tty/serial/8250/8250_dma.c 	ret = dma_get_slave_caps(dma->txchan, &caps);
dma               206 drivers/tty/serial/8250/8250_dma.c 	dmaengine_slave_config(dma->txchan, &dma->txconf);
dma               209 drivers/tty/serial/8250/8250_dma.c 	if (!dma->rx_size)
dma               210 drivers/tty/serial/8250/8250_dma.c 		dma->rx_size = PAGE_SIZE;
dma               212 drivers/tty/serial/8250/8250_dma.c 	dma->rx_buf = dma_alloc_coherent(dma->rxchan->device->dev, dma->rx_size,
dma               213 drivers/tty/serial/8250/8250_dma.c 					&dma->rx_addr, GFP_KERNEL);
dma               214 drivers/tty/serial/8250/8250_dma.c 	if (!dma->rx_buf) {
dma               220 drivers/tty/serial/8250/8250_dma.c 	dma->tx_addr = dma_map_single(dma->txchan->device->dev,
dma               224 drivers/tty/serial/8250/8250_dma.c 	if (dma_mapping_error(dma->txchan->device->dev, dma->tx_addr)) {
dma               225 drivers/tty/serial/8250/8250_dma.c 		dma_free_coherent(dma->rxchan->device->dev, dma->rx_size,
dma               226 drivers/tty/serial/8250/8250_dma.c 				  dma->rx_buf, dma->rx_addr);
dma               235 drivers/tty/serial/8250/8250_dma.c 	dma_release_channel(dma->txchan);
dma               237 drivers/tty/serial/8250/8250_dma.c 	dma_release_channel(dma->rxchan);
dma               244 drivers/tty/serial/8250/8250_dma.c 	struct uart_8250_dma *dma = p->dma;
dma               246 drivers/tty/serial/8250/8250_dma.c 	if (!dma)
dma               250 drivers/tty/serial/8250/8250_dma.c 	dmaengine_terminate_sync(dma->rxchan);
dma               251 drivers/tty/serial/8250/8250_dma.c 	dma_free_coherent(dma->rxchan->device->dev, dma->rx_size, dma->rx_buf,
dma               252 drivers/tty/serial/8250/8250_dma.c 			  dma->rx_addr);
dma               253 drivers/tty/serial/8250/8250_dma.c 	dma_release_channel(dma->rxchan);
dma               254 drivers/tty/serial/8250/8250_dma.c 	dma->rxchan = NULL;
dma               257 drivers/tty/serial/8250/8250_dma.c 	dmaengine_terminate_sync(dma->txchan);
dma               258 drivers/tty/serial/8250/8250_dma.c 	dma_unmap_single(dma->txchan->device->dev, dma->tx_addr,
dma               260 drivers/tty/serial/8250/8250_dma.c 	dma_release_channel(dma->txchan);
dma               261 drivers/tty/serial/8250/8250_dma.c 	dma->txchan = NULL;
dma               262 drivers/tty/serial/8250/8250_dma.c 	dma->tx_running = 0;
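8250_dma.c above shows the standard dmaengine slave-TX sequence: sync the pre-mapped circular buffer for the device, prep a single-buffer descriptor, attach a completion callback, submit, and issue pending. A condensed sketch of that sequence; channel setup and the UART-specific bookkeeping are left out:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    /* Kick one TX transfer of @len bytes already mapped at @addr. */
    static int ex_start_tx(struct dma_chan *chan, struct device *mapdev,
                           dma_addr_t addr, size_t len,
                           dma_async_tx_callback done, void *done_arg)
    {
        struct dma_async_tx_descriptor *desc;

        desc = dmaengine_prep_slave_single(chan, addr, len, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
            return -EBUSY;

        desc->callback = done;
        desc->callback_param = done_arg;

        /* Hand ownership of the buffer to the device before starting. */
        dma_sync_single_for_device(mapdev, addr, len, DMA_TO_DEVICE);

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
    }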
dma               240 drivers/tty/serial/8250/8250_dw.c 	if (!up->dma && ((iir & 0x3f) == UART_IIR_RX_TIMEOUT)) {
dma               379 drivers/tty/serial/8250/8250_dw.c 		data->data.dma.rx_param = p->dev->parent;
dma               380 drivers/tty/serial/8250/8250_dw.c 		data->data.dma.tx_param = p->dev->parent;
dma               381 drivers/tty/serial/8250/8250_dw.c 		data->data.dma.fn = dw8250_idma_filter;
dma               427 drivers/tty/serial/8250/8250_dw.c 	data->data.dma.fn = dw8250_fallback_dma_filter;
dma               525 drivers/tty/serial/8250/8250_dw.c 		data->data.dma.rxconf.src_maxburst = p->fifosize / 4;
dma               526 drivers/tty/serial/8250/8250_dw.c 		data->data.dma.txconf.dst_maxburst = p->fifosize / 4;
dma               527 drivers/tty/serial/8250/8250_dw.c 		up->dma = &data->data.dma;
dma                13 drivers/tty/serial/8250/8250_dwlib.h 	struct uart_8250_dma	dma;
dma                29 drivers/tty/serial/8250/8250_lpc18xx.c 	struct uart_8250_dma dma;
dma               157 drivers/tty/serial/8250/8250_lpc18xx.c 	data->dma.rx_param = data;
dma               158 drivers/tty/serial/8250/8250_lpc18xx.c 	data->dma.tx_param = data;
dma               173 drivers/tty/serial/8250/8250_lpc18xx.c 	uart.dma = &data->dma;
dma               174 drivers/tty/serial/8250/8250_lpc18xx.c 	uart.dma->rxconf.src_maxburst = 1;
dma               175 drivers/tty/serial/8250/8250_lpc18xx.c 	uart.dma->txconf.dst_maxburst = 1;
dma               172 drivers/tty/serial/8250/8250_lpss.c 	struct uart_8250_dma *dma = &lpss->data.dma;
dma               194 drivers/tty/serial/8250/8250_lpss.c 	dma->rx_dma_addr = 0xfffff000;
dma               195 drivers/tty/serial/8250/8250_lpss.c 	dma->tx_dma_addr = 0xfffff000;
dma               257 drivers/tty/serial/8250/8250_lpss.c 	struct uart_8250_dma *dma = &lpss->data.dma;
dma               273 drivers/tty/serial/8250/8250_lpss.c 	dma->rxconf.src_maxburst = lpss->dma_maxburst;
dma               276 drivers/tty/serial/8250/8250_lpss.c 	dma->txconf.dst_maxburst = lpss->dma_maxburst;
dma               278 drivers/tty/serial/8250/8250_lpss.c 	dma->fn = lpss8250_dma_filter;
dma               279 drivers/tty/serial/8250/8250_lpss.c 	dma->rx_param = rx_param;
dma               280 drivers/tty/serial/8250/8250_lpss.c 	dma->tx_param = tx_param;
dma               282 drivers/tty/serial/8250/8250_lpss.c 	port->dma = dma;
dma                46 drivers/tty/serial/8250/8250_mid.c 	struct uart_8250_dma dma;
dma               248 drivers/tty/serial/8250/8250_mid.c 	struct uart_8250_dma *dma = &mid->dma;
dma               267 drivers/tty/serial/8250/8250_mid.c 	dma->rxconf.src_maxburst = 64;
dma               268 drivers/tty/serial/8250/8250_mid.c 	dma->txconf.dst_maxburst = 64;
dma               273 drivers/tty/serial/8250/8250_mid.c 	dma->fn = mid8250_dma_filter;
dma               274 drivers/tty/serial/8250/8250_mid.c 	dma->rx_param = rx_param;
dma               275 drivers/tty/serial/8250/8250_mid.c 	dma->tx_param = tx_param;
dma               277 drivers/tty/serial/8250/8250_mid.c 	port->dma = dma;
dma                69 drivers/tty/serial/8250/8250_mtk.c 	struct uart_8250_dma	*dma;
dma                89 drivers/tty/serial/8250/8250_mtk.c 	struct uart_8250_dma *dma = up->dma;
dma                99 drivers/tty/serial/8250/8250_mtk.c 	dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);
dma               100 drivers/tty/serial/8250/8250_mtk.c 	total = dma->rx_size - state.residue;
dma               103 drivers/tty/serial/8250/8250_mtk.c 	if ((data->rx_pos + cnt) > dma->rx_size)
dma               104 drivers/tty/serial/8250/8250_mtk.c 		cnt = dma->rx_size - data->rx_pos;
dma               106 drivers/tty/serial/8250/8250_mtk.c 	ptr = (unsigned char *)(data->rx_pos + dma->rx_buf);
dma               111 drivers/tty/serial/8250/8250_mtk.c 		ptr = (unsigned char *)(dma->rx_buf);
dma               126 drivers/tty/serial/8250/8250_mtk.c 	struct uart_8250_dma *dma = up->dma;
dma               129 drivers/tty/serial/8250/8250_mtk.c 	desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr,
dma               130 drivers/tty/serial/8250/8250_mtk.c 					   dma->rx_size, DMA_DEV_TO_MEM,
dma               140 drivers/tty/serial/8250/8250_mtk.c 	dma->rx_cookie = dmaengine_submit(desc);
dma               142 drivers/tty/serial/8250/8250_mtk.c 	dma_async_issue_pending(dma->rxchan);
dma               147 drivers/tty/serial/8250/8250_mtk.c 	struct uart_8250_dma *dma = up->dma;
dma               154 drivers/tty/serial/8250/8250_mtk.c 	dma->rxconf.src_port_window_size	= dma->rx_size;
dma               155 drivers/tty/serial/8250/8250_mtk.c 	dma->rxconf.src_addr				= dma->rx_addr;
dma               157 drivers/tty/serial/8250/8250_mtk.c 	dma->txconf.dst_port_window_size	= UART_XMIT_SIZE;
dma               158 drivers/tty/serial/8250/8250_mtk.c 	dma->txconf.dst_addr				= dma->tx_addr;
dma               169 drivers/tty/serial/8250/8250_mtk.c 	if (dmaengine_slave_config(dma->rxchan, &dma->rxconf) != 0)
dma               171 drivers/tty/serial/8250/8250_mtk.c 	if (dmaengine_slave_config(dma->txchan, &dma->txconf) != 0)
dma               188 drivers/tty/serial/8250/8250_mtk.c 		up->dma = NULL;
dma               190 drivers/tty/serial/8250/8250_mtk.c 	if (up->dma) {
dma               206 drivers/tty/serial/8250/8250_mtk.c 	if (up->dma)
dma               298 drivers/tty/serial/8250/8250_mtk.c 	if (up->dma) {
dma               300 drivers/tty/serial/8250/8250_mtk.c 			devm_kfree(up->port.dev, up->dma);
dma               301 drivers/tty/serial/8250/8250_mtk.c 			up->dma = NULL;
dma               463 drivers/tty/serial/8250/8250_mtk.c 	data->dma = NULL;
dma               467 drivers/tty/serial/8250/8250_mtk.c 		data->dma = devm_kzalloc(&pdev->dev, sizeof(*data->dma),
dma               469 drivers/tty/serial/8250/8250_mtk.c 		if (!data->dma)
dma               472 drivers/tty/serial/8250/8250_mtk.c 		data->dma->fn = mtk8250_dma_filter;
dma               473 drivers/tty/serial/8250/8250_mtk.c 		data->dma->rx_size = MTK_UART_RX_SIZE;
dma               474 drivers/tty/serial/8250/8250_mtk.c 		data->dma->rxconf.src_maxburst = MTK_UART_RX_TRIGGER;
dma               475 drivers/tty/serial/8250/8250_mtk.c 		data->dma->txconf.dst_maxburst = MTK_UART_TX_TRIGGER;
dma               526 drivers/tty/serial/8250/8250_mtk.c 	if (data->dma)
dma               527 drivers/tty/serial/8250/8250_mtk.c 		uart.dma = data->dma;
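The 8250_mtk RX path above derives the received byte count from the descriptor residue (total = rx_size - state.residue) and then copies out of a circular DMA buffer, splitting the copy when it wraps. A sketch of that wrap-around copy; the rx_pos consumer offset mirrors the driver, while the push() callback stands in for tty_insert_flip_string():

    #include <linux/types.h>

    /* Copy @total newly received bytes out of a circular DMA buffer. */
    static void ex_copy_rx(void *rx_buf, size_t rx_size, size_t *rx_pos,
                           size_t total, void (*push)(const void *, size_t))
    {
        size_t cnt = total;

        /* First chunk: from the current position up to the end of the ring. */
        if (*rx_pos + cnt > rx_size)
            cnt = rx_size - *rx_pos;
        push(rx_buf + *rx_pos, cnt);

        /* Second chunk: whatever wrapped around to the start of the ring. */
        if (total > cnt) {
            push(rx_buf, total - cnt);
            *rx_pos = total - cnt;
        } else {
            *rx_pos = (*rx_pos + cnt) % rx_size;
        }
    }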
dma               275 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma	*dma = up->dma;
dma               277 drivers/tty/serial/8250/8250_omap.c 	if (dma && dma->tx_running) {
dma               448 drivers/tty/serial/8250/8250_omap.c 	if (up->dma)
dma               591 drivers/tty/serial/8250/8250_omap.c 	if (up->dma) {
dma               629 drivers/tty/serial/8250/8250_omap.c 		up->dma = NULL;
dma               631 drivers/tty/serial/8250/8250_omap.c 	if (up->dma) {
dma               636 drivers/tty/serial/8250/8250_omap.c 			up->dma = NULL;
dma               658 drivers/tty/serial/8250/8250_omap.c 	if (up->dma)
dma               659 drivers/tty/serial/8250/8250_omap.c 		up->dma->rx_dma(up);
dma               677 drivers/tty/serial/8250/8250_omap.c 	if (up->dma)
dma               687 drivers/tty/serial/8250/8250_omap.c 	if (up->dma)
dma               761 drivers/tty/serial/8250/8250_omap.c 	if (up->dma)
dma               762 drivers/tty/serial/8250/8250_omap.c 		up->dma->rx_dma(up);
dma               777 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma    *dma = p->dma;
dma               786 drivers/tty/serial/8250/8250_omap.c 	if (!dma->rx_running)
dma               789 drivers/tty/serial/8250/8250_omap.c 	dma->rx_running = 0;
dma               790 drivers/tty/serial/8250/8250_omap.c 	dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);
dma               792 drivers/tty/serial/8250/8250_omap.c 	count = dma->rx_size - state.residue;
dma               794 drivers/tty/serial/8250/8250_omap.c 	ret = tty_insert_flip_string(tty_port, dma->rx_buf, count);
dma               808 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma *dma = p->dma;
dma               819 drivers/tty/serial/8250/8250_omap.c 	if (dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state) !=
dma               834 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma	*dma = p->dma;
dma               841 drivers/tty/serial/8250/8250_omap.c 	if (!dma->rx_running) {
dma               846 drivers/tty/serial/8250/8250_omap.c 	ret = dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state);
dma               848 drivers/tty/serial/8250/8250_omap.c 		ret = dmaengine_pause(dma->rxchan);
dma               855 drivers/tty/serial/8250/8250_omap.c 	dmaengine_terminate_all(dma->rxchan);
dma               861 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma            *dma = p->dma;
dma               871 drivers/tty/serial/8250/8250_omap.c 	if (dma->rx_running)
dma               874 drivers/tty/serial/8250/8250_omap.c 	desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr,
dma               875 drivers/tty/serial/8250/8250_omap.c 					   dma->rx_size, DMA_DEV_TO_MEM,
dma               882 drivers/tty/serial/8250/8250_omap.c 	dma->rx_running = 1;
dma               886 drivers/tty/serial/8250/8250_omap.c 	dma->rx_cookie = dmaengine_submit(desc);
dma               888 drivers/tty/serial/8250/8250_omap.c 	dma_async_issue_pending(dma->rxchan);
dma               899 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma	*dma = p->dma;
dma               905 drivers/tty/serial/8250/8250_omap.c 	dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr,
dma               910 drivers/tty/serial/8250/8250_omap.c 	dma->tx_running = 0;
dma               912 drivers/tty/serial/8250/8250_omap.c 	xmit->tail += dma->tx_size;
dma               914 drivers/tty/serial/8250/8250_omap.c 	p->port.icount.tx += dma->tx_size;
dma               935 drivers/tty/serial/8250/8250_omap.c 		dma->tx_err = 1;
dma               944 drivers/tty/serial/8250/8250_omap.c 	struct uart_8250_dma		*dma = p->dma;
dma               951 drivers/tty/serial/8250/8250_omap.c 	if (dma->tx_running)
dma               960 drivers/tty/serial/8250/8250_omap.c 		if (dma->tx_err || p->capabilities & UART_CAP_RPM) {
dma               968 drivers/tty/serial/8250/8250_omap.c 	dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE);
dma               992 drivers/tty/serial/8250/8250_omap.c 		if (dma->tx_size < 4) {
dma               999 drivers/tty/serial/8250/8250_omap.c 	desc = dmaengine_prep_slave_single(dma->txchan,
dma              1000 drivers/tty/serial/8250/8250_omap.c 			dma->tx_addr + xmit->tail + skip_byte,
dma              1001 drivers/tty/serial/8250/8250_omap.c 			dma->tx_size - skip_byte, DMA_MEM_TO_DEV,
dma              1008 drivers/tty/serial/8250/8250_omap.c 	dma->tx_running = 1;
dma              1013 drivers/tty/serial/8250/8250_omap.c 	dma->tx_cookie = dmaengine_submit(desc);
dma              1015 drivers/tty/serial/8250/8250_omap.c 	dma_sync_single_for_device(dma->txchan->device->dev, dma->tx_addr,
dma              1018 drivers/tty/serial/8250/8250_omap.c 	dma_async_issue_pending(dma->txchan);
dma              1019 drivers/tty/serial/8250/8250_omap.c 	if (dma->tx_err)
dma              1020 drivers/tty/serial/8250/8250_omap.c 		dma->tx_err = 0;
dma              1027 drivers/tty/serial/8250/8250_omap.c 	dma->tx_err = 1;
dma              1074 drivers/tty/serial/8250/8250_omap.c 	if (status & UART_LSR_THRE && up->dma->tx_err) {
dma              1077 drivers/tty/serial/8250/8250_omap.c 			up->dma->tx_err = 0;
dma              1267 drivers/tty/serial/8250/8250_omap.c 		up.dma = &priv->omap8250_dma;
dma              1448 drivers/tty/serial/8250/8250_omap.c 	if (up->dma && up->dma->rxchan)
dma              1471 drivers/tty/serial/8250/8250_omap.c 	if (up->dma && up->dma->rxchan)
dma              1509 drivers/tty/serial/8250/8250_port.c 	if (up->dma && !up->dma->tx_dma(up))
dma              1808 drivers/tty/serial/8250/8250_port.c 	return up->dma->rx_dma(up);
dma              1828 drivers/tty/serial/8250/8250_port.c 		if (!up->dma || handle_rx_dma(up, iir))
dma              1832 drivers/tty/serial/8250/8250_port.c 	if ((!up->dma || up->dma->tx_err) && (status & UART_LSR_THRE) &&
dma              2311 drivers/tty/serial/8250/8250_port.c 	if (up->dma) {
dma              2316 drivers/tty/serial/8250/8250_port.c 			up->dma = NULL;
dma              2366 drivers/tty/serial/8250/8250_port.c 	if (up->dma)
dma              2584 drivers/tty/serial/8250/8250_port.c 		if ((baud < 2400 && !up->dma) || up->fifo_bug) {
dma              3099 drivers/tty/serial/8250/8250_port.c 	if (up->dma) {
dma              3100 drivers/tty/serial/8250/8250_port.c 		if (!up->dma->tx_dma)
dma              3101 drivers/tty/serial/8250/8250_port.c 			up->dma->tx_dma = serial8250_tx_dma;
dma              3102 drivers/tty/serial/8250/8250_port.c 		if (!up->dma->rx_dma)
dma              3103 drivers/tty/serial/8250/8250_port.c 			up->dma->rx_dma = serial8250_rx_dma;
dma               129 drivers/tty/serial/cpm_uart/cpm_uart.h 	u32 dma = (u32)pinfo->dma_addr;
dma               131 drivers/tty/serial/cpm_uart/cpm_uart.h 	if (likely(val >= dma && val < dma + pinfo->mem_size)) {
dma               132 drivers/tty/serial/cpm_uart/cpm_uart.h 		offset = val - dma;
dma               245 drivers/tty/serial/msm_serial.c static void msm_stop_dma(struct uart_port *port, struct msm_dma *dma)
dma               251 drivers/tty/serial/msm_serial.c 	mapped = dma->count;
dma               252 drivers/tty/serial/msm_serial.c 	dma->count = 0;
dma               254 drivers/tty/serial/msm_serial.c 	dmaengine_terminate_all(dma->chan);
dma               264 drivers/tty/serial/msm_serial.c 	val &= ~dma->enable_bit;
dma               268 drivers/tty/serial/msm_serial.c 		dma_unmap_single(dev, dma->phys, mapped, dma->dir);
dma               273 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma;
dma               275 drivers/tty/serial/msm_serial.c 	dma = &msm_port->tx_dma;
dma               276 drivers/tty/serial/msm_serial.c 	if (dma->chan) {
dma               277 drivers/tty/serial/msm_serial.c 		msm_stop_dma(&msm_port->uart, dma);
dma               278 drivers/tty/serial/msm_serial.c 		dma_release_channel(dma->chan);
dma               281 drivers/tty/serial/msm_serial.c 	memset(dma, 0, sizeof(*dma));
dma               283 drivers/tty/serial/msm_serial.c 	dma = &msm_port->rx_dma;
dma               284 drivers/tty/serial/msm_serial.c 	if (dma->chan) {
dma               285 drivers/tty/serial/msm_serial.c 		msm_stop_dma(&msm_port->uart, dma);
dma               286 drivers/tty/serial/msm_serial.c 		dma_release_channel(dma->chan);
dma               287 drivers/tty/serial/msm_serial.c 		kfree(dma->virt);
dma               290 drivers/tty/serial/msm_serial.c 	memset(dma, 0, sizeof(*dma));
dma               297 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma;
dma               301 drivers/tty/serial/msm_serial.c 	dma = &msm_port->tx_dma;
dma               304 drivers/tty/serial/msm_serial.c 	dma->chan = dma_request_slave_channel_reason(dev, "tx");
dma               305 drivers/tty/serial/msm_serial.c 	if (IS_ERR(dma->chan))
dma               317 drivers/tty/serial/msm_serial.c 	ret = dmaengine_slave_config(dma->chan, &conf);
dma               321 drivers/tty/serial/msm_serial.c 	dma->dir = DMA_TO_DEVICE;
dma               324 drivers/tty/serial/msm_serial.c 		dma->enable_bit = UARTDM_DMEN_TX_DM_ENABLE;
dma               326 drivers/tty/serial/msm_serial.c 		dma->enable_bit = UARTDM_DMEN_TX_BAM_ENABLE;
dma               331 drivers/tty/serial/msm_serial.c 	dma_release_channel(dma->chan);
dma               333 drivers/tty/serial/msm_serial.c 	memset(dma, 0, sizeof(*dma));
dma               340 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma;
dma               344 drivers/tty/serial/msm_serial.c 	dma = &msm_port->rx_dma;
dma               347 drivers/tty/serial/msm_serial.c 	dma->chan = dma_request_slave_channel_reason(dev, "rx");
dma               348 drivers/tty/serial/msm_serial.c 	if (IS_ERR(dma->chan))
dma               353 drivers/tty/serial/msm_serial.c 	dma->virt = kzalloc(UARTDM_RX_SIZE, GFP_KERNEL);
dma               354 drivers/tty/serial/msm_serial.c 	if (!dma->virt)
dma               364 drivers/tty/serial/msm_serial.c 	ret = dmaengine_slave_config(dma->chan, &conf);
dma               368 drivers/tty/serial/msm_serial.c 	dma->dir = DMA_FROM_DEVICE;
dma               371 drivers/tty/serial/msm_serial.c 		dma->enable_bit = UARTDM_DMEN_RX_DM_ENABLE;
dma               373 drivers/tty/serial/msm_serial.c 		dma->enable_bit = UARTDM_DMEN_RX_BAM_ENABLE;
dma               377 drivers/tty/serial/msm_serial.c 	kfree(dma->virt);
dma               379 drivers/tty/serial/msm_serial.c 	dma_release_channel(dma->chan);
dma               381 drivers/tty/serial/msm_serial.c 	memset(dma, 0, sizeof(*dma));
dma               409 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->tx_dma;
dma               412 drivers/tty/serial/msm_serial.c 	if (dma->count)
dma               431 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->tx_dma;
dma               441 drivers/tty/serial/msm_serial.c 	if (!dma->count)
dma               444 drivers/tty/serial/msm_serial.c 	status = dmaengine_tx_status(dma->chan, dma->cookie, &state);
dma               446 drivers/tty/serial/msm_serial.c 	dma_unmap_single(port->dev, dma->phys, dma->count, dma->dir);
dma               449 drivers/tty/serial/msm_serial.c 	val &= ~dma->enable_bit;
dma               457 drivers/tty/serial/msm_serial.c 	count = dma->count - state.residue;
dma               459 drivers/tty/serial/msm_serial.c 	dma->count = 0;
dma               480 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->tx_dma;
dma               487 drivers/tty/serial/msm_serial.c 	dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir);
dma               488 drivers/tty/serial/msm_serial.c 	ret = dma_mapping_error(port->dev, dma->phys);
dma               492 drivers/tty/serial/msm_serial.c 	dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys,
dma               496 drivers/tty/serial/msm_serial.c 	if (!dma->desc) {
dma               501 drivers/tty/serial/msm_serial.c 	dma->desc->callback = msm_complete_tx_dma;
dma               502 drivers/tty/serial/msm_serial.c 	dma->desc->callback_param = msm_port;
dma               504 drivers/tty/serial/msm_serial.c 	dma->cookie = dmaengine_submit(dma->desc);
dma               505 drivers/tty/serial/msm_serial.c 	ret = dma_submit_error(dma->cookie);
dma               516 drivers/tty/serial/msm_serial.c 	dma->count = count;
dma               519 drivers/tty/serial/msm_serial.c 	val |= dma->enable_bit;
dma               529 drivers/tty/serial/msm_serial.c 	dma_async_issue_pending(dma->chan);
dma               532 drivers/tty/serial/msm_serial.c 	dma_unmap_single(port->dev, dma->phys, count, dma->dir);
dma               541 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->rx_dma;
dma               549 drivers/tty/serial/msm_serial.c 	if (!dma->count)
dma               553 drivers/tty/serial/msm_serial.c 	val &= ~dma->enable_bit;
dma               566 drivers/tty/serial/msm_serial.c 	dma->count = 0;
dma               568 drivers/tty/serial/msm_serial.c 	dma_unmap_single(port->dev, dma->phys, UARTDM_RX_SIZE, dma->dir);
dma               573 drivers/tty/serial/msm_serial.c 		if (msm_port->break_detected && dma->virt[i] == 0) {
dma               585 drivers/tty/serial/msm_serial.c 		sysrq = uart_handle_sysrq_char(port, dma->virt[i]);
dma               588 drivers/tty/serial/msm_serial.c 			tty_insert_flip_char(tport, dma->virt[i], flag);
dma               601 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->rx_dma;
dma               606 drivers/tty/serial/msm_serial.c 	if (!dma->chan)
dma               609 drivers/tty/serial/msm_serial.c 	dma->phys = dma_map_single(uart->dev, dma->virt,
dma               610 drivers/tty/serial/msm_serial.c 				   UARTDM_RX_SIZE, dma->dir);
dma               611 drivers/tty/serial/msm_serial.c 	ret = dma_mapping_error(uart->dev, dma->phys);
dma               615 drivers/tty/serial/msm_serial.c 	dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys,
dma               618 drivers/tty/serial/msm_serial.c 	if (!dma->desc)
dma               621 drivers/tty/serial/msm_serial.c 	dma->desc->callback = msm_complete_rx_dma;
dma               622 drivers/tty/serial/msm_serial.c 	dma->desc->callback_param = msm_port;
dma               624 drivers/tty/serial/msm_serial.c 	dma->cookie = dmaengine_submit(dma->desc);
dma               625 drivers/tty/serial/msm_serial.c 	ret = dma_submit_error(dma->cookie);
dma               643 drivers/tty/serial/msm_serial.c 	dma->count = UARTDM_RX_SIZE;
dma               645 drivers/tty/serial/msm_serial.c 	dma_async_issue_pending(dma->chan);
dma               651 drivers/tty/serial/msm_serial.c 	val |= dma->enable_bit;
dma               663 drivers/tty/serial/msm_serial.c 	dma_unmap_single(uart->dev, dma->phys, UARTDM_RX_SIZE, dma->dir);
dma               669 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->rx_dma;
dma               674 drivers/tty/serial/msm_serial.c 	if (dma->chan)
dma               675 drivers/tty/serial/msm_serial.c 		msm_stop_dma(port, dma);
dma               865 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->tx_dma;
dma               908 drivers/tty/serial/msm_serial.c 	if (!dma->chan || dma_count < dma_min)
dma               928 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->rx_dma;
dma               943 drivers/tty/serial/msm_serial.c 		if (dma->count) {
dma               952 drivers/tty/serial/msm_serial.c 			dmaengine_terminate_all(dma->chan);
dma              1243 drivers/tty/serial/msm_serial.c 	struct msm_dma *dma = &msm_port->rx_dma;
dma              1249 drivers/tty/serial/msm_serial.c 	if (dma->chan) /* Terminate if any */
dma              1250 drivers/tty/serial/msm_serial.c 		msm_stop_dma(port, dma);
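msm_serial above maps the TX buffer per transfer and checks both the mapping and the submit result, unmapping again on any failure before falling back to PIO. A sketch of that error-unwinding shape; the UART register writes are omitted and the flag choice is illustrative:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    static int ex_msm_style_tx(struct device *dev, struct dma_chan *chan,
                               void *cpu_addr, size_t count,
                               dma_async_tx_callback done, void *arg)
    {
        struct dma_async_tx_descriptor *desc;
        dma_addr_t phys;
        dma_cookie_t cookie;
        int ret;

        phys = dma_map_single(dev, cpu_addr, count, DMA_TO_DEVICE);
        ret = dma_mapping_error(dev, phys);
        if (ret)
            return ret;

        desc = dmaengine_prep_slave_single(chan, phys, count, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc) {
            ret = -EIO;
            goto unmap;
        }
        desc->callback = done;
        desc->callback_param = arg;

        cookie = dmaengine_submit(desc);
        ret = dma_submit_error(cookie);
        if (ret)
            goto unmap;

        dma_async_issue_pending(chan);
        return 0;

    unmap:
        dma_unmap_single(dev, phys, count, DMA_TO_DEVICE);
        return ret;
    }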
dma               160 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               173 drivers/tty/serial/samsung.c 	if (dma && dma->tx_chan && ourport->tx_in_progress == S3C24XX_TX_DMA) {
dma               174 drivers/tty/serial/samsung.c 		dmaengine_pause(dma->tx_chan);
dma               175 drivers/tty/serial/samsung.c 		dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state);
dma               176 drivers/tty/serial/samsung.c 		dmaengine_terminate_all(dma->tx_chan);
dma               178 drivers/tty/serial/samsung.c 			dma->tx_transfer_addr, dma->tx_size, DMA_TO_DEVICE);
dma               179 drivers/tty/serial/samsung.c 		async_tx_ack(dma->tx_desc);
dma               180 drivers/tty/serial/samsung.c 		count = dma->tx_bytes_requested - state.residue;
dma               201 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               207 drivers/tty/serial/samsung.c 	dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state);
dma               208 drivers/tty/serial/samsung.c 	count = dma->tx_bytes_requested - state.residue;
dma               209 drivers/tty/serial/samsung.c 	async_tx_ack(dma->tx_desc);
dma               211 drivers/tty/serial/samsung.c 	dma_sync_single_for_cpu(ourport->port.dev, dma->tx_transfer_addr,
dma               212 drivers/tty/serial/samsung.c 				dma->tx_size, DMA_TO_DEVICE);
dma               286 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               292 drivers/tty/serial/samsung.c 	dma->tx_size = count & ~(dma_get_cache_alignment() - 1);
dma               293 drivers/tty/serial/samsung.c 	dma->tx_transfer_addr = dma->tx_addr + xmit->tail;
dma               295 drivers/tty/serial/samsung.c 	dma_sync_single_for_device(ourport->port.dev, dma->tx_transfer_addr,
dma               296 drivers/tty/serial/samsung.c 				dma->tx_size, DMA_TO_DEVICE);
dma               298 drivers/tty/serial/samsung.c 	dma->tx_desc = dmaengine_prep_slave_single(dma->tx_chan,
dma               299 drivers/tty/serial/samsung.c 				dma->tx_transfer_addr, dma->tx_size,
dma               301 drivers/tty/serial/samsung.c 	if (!dma->tx_desc) {
dma               306 drivers/tty/serial/samsung.c 	dma->tx_desc->callback = s3c24xx_serial_tx_dma_complete;
dma               307 drivers/tty/serial/samsung.c 	dma->tx_desc->callback_param = ourport;
dma               308 drivers/tty/serial/samsung.c 	dma->tx_bytes_requested = dma->tx_size;
dma               311 drivers/tty/serial/samsung.c 	dma->tx_cookie = dmaengine_submit(dma->tx_desc);
dma               312 drivers/tty/serial/samsung.c 	dma_async_issue_pending(dma->tx_chan);
dma               330 drivers/tty/serial/samsung.c 	if (!ourport->dma || !ourport->dma->tx_chan ||
dma               348 drivers/tty/serial/samsung.c 		if (!ourport->dma || !ourport->dma->tx_chan)
dma               352 drivers/tty/serial/samsung.c 	if (ourport->dma && ourport->dma->tx_chan) {
dma               361 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               367 drivers/tty/serial/samsung.c 	dma_sync_single_for_cpu(ourport->port.dev, dma->rx_addr,
dma               368 drivers/tty/serial/samsung.c 				dma->rx_size, DMA_FROM_DEVICE);
dma               376 drivers/tty/serial/samsung.c 			((unsigned char *)(ourport->dma->rx_buf)), count);
dma               386 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               401 drivers/tty/serial/samsung.c 	if (dma && dma->rx_chan) {
dma               402 drivers/tty/serial/samsung.c 		dmaengine_pause(dma->tx_chan);
dma               403 drivers/tty/serial/samsung.c 		dma_status = dmaengine_tx_status(dma->rx_chan,
dma               404 drivers/tty/serial/samsung.c 				dma->rx_cookie, &state);
dma               407 drivers/tty/serial/samsung.c 			received = dma->rx_bytes_requested - state.residue;
dma               408 drivers/tty/serial/samsung.c 			dmaengine_terminate_all(dma->rx_chan);
dma               449 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               457 drivers/tty/serial/samsung.c 	dmaengine_tx_status(dma->rx_chan,  dma->rx_cookie, &state);
dma               458 drivers/tty/serial/samsung.c 	received  = dma->rx_bytes_requested - state.residue;
dma               459 drivers/tty/serial/samsung.c 	async_tx_ack(dma->rx_desc);
dma               478 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               480 drivers/tty/serial/samsung.c 	dma_sync_single_for_device(ourport->port.dev, dma->rx_addr,
dma               481 drivers/tty/serial/samsung.c 				dma->rx_size, DMA_FROM_DEVICE);
dma               483 drivers/tty/serial/samsung.c 	dma->rx_desc = dmaengine_prep_slave_single(dma->rx_chan,
dma               484 drivers/tty/serial/samsung.c 				dma->rx_addr, dma->rx_size, DMA_DEV_TO_MEM,
dma               486 drivers/tty/serial/samsung.c 	if (!dma->rx_desc) {
dma               491 drivers/tty/serial/samsung.c 	dma->rx_desc->callback = s3c24xx_serial_rx_dma_complete;
dma               492 drivers/tty/serial/samsung.c 	dma->rx_desc->callback_param = ourport;
dma               493 drivers/tty/serial/samsung.c 	dma->rx_bytes_requested = dma->rx_size;
dma               495 drivers/tty/serial/samsung.c 	dma->rx_cookie = dmaengine_submit(dma->rx_desc);
dma               496 drivers/tty/serial/samsung.c 	dma_async_issue_pending(dma->rx_chan);
dma               552 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma *dma = ourport->dma;
dma               571 drivers/tty/serial/samsung.c 		dmaengine_pause(dma->rx_chan);
dma               572 drivers/tty/serial/samsung.c 		dmaengine_tx_status(dma->rx_chan, dma->rx_cookie, &state);
dma               573 drivers/tty/serial/samsung.c 		dmaengine_terminate_all(dma->rx_chan);
dma               574 drivers/tty/serial/samsung.c 		received = dma->rx_bytes_requested - state.residue;
dma               699 drivers/tty/serial/samsung.c 	if (ourport->dma && ourport->dma->rx_chan)
dma               716 drivers/tty/serial/samsung.c 	if (ourport->dma && ourport->dma->tx_chan &&
dma               858 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma	*dma = p->dma;
dma               864 drivers/tty/serial/samsung.c 	dma->rx_conf.direction		= DMA_DEV_TO_MEM;
dma               865 drivers/tty/serial/samsung.c 	dma->rx_conf.src_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE;
dma               866 drivers/tty/serial/samsung.c 	dma->rx_conf.src_addr		= p->port.mapbase + S3C2410_URXH;
dma               867 drivers/tty/serial/samsung.c 	dma->rx_conf.src_maxburst	= 1;
dma               869 drivers/tty/serial/samsung.c 	dma->tx_conf.direction		= DMA_MEM_TO_DEV;
dma               870 drivers/tty/serial/samsung.c 	dma->tx_conf.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE;
dma               871 drivers/tty/serial/samsung.c 	dma->tx_conf.dst_addr		= p->port.mapbase + S3C2410_UTXH;
dma               872 drivers/tty/serial/samsung.c 	dma->tx_conf.dst_maxburst	= 1;
dma               874 drivers/tty/serial/samsung.c 	dma->rx_chan = dma_request_chan(p->port.dev, "rx");
dma               876 drivers/tty/serial/samsung.c 	if (IS_ERR(dma->rx_chan)) {
dma               878 drivers/tty/serial/samsung.c 		ret = PTR_ERR(dma->rx_chan);
dma               882 drivers/tty/serial/samsung.c 	ret = dma_get_slave_caps(dma->rx_chan, &dma_caps);
dma               890 drivers/tty/serial/samsung.c 	dmaengine_slave_config(dma->rx_chan, &dma->rx_conf);
dma               892 drivers/tty/serial/samsung.c 	dma->tx_chan = dma_request_chan(p->port.dev, "tx");
dma               893 drivers/tty/serial/samsung.c 	if (IS_ERR(dma->tx_chan)) {
dma               895 drivers/tty/serial/samsung.c 		ret = PTR_ERR(dma->tx_chan);
dma               899 drivers/tty/serial/samsung.c 	ret = dma_get_slave_caps(dma->tx_chan, &dma_caps);
dma               907 drivers/tty/serial/samsung.c 	dmaengine_slave_config(dma->tx_chan, &dma->tx_conf);
dma               910 drivers/tty/serial/samsung.c 	dma->rx_size = PAGE_SIZE;
dma               912 drivers/tty/serial/samsung.c 	dma->rx_buf = kmalloc(dma->rx_size, GFP_KERNEL);
dma               913 drivers/tty/serial/samsung.c 	if (!dma->rx_buf) {
dma               918 drivers/tty/serial/samsung.c 	dma->rx_addr = dma_map_single(p->port.dev, dma->rx_buf,
dma               919 drivers/tty/serial/samsung.c 				dma->rx_size, DMA_FROM_DEVICE);
dma               920 drivers/tty/serial/samsung.c 	if (dma_mapping_error(p->port.dev, dma->rx_addr)) {
dma               927 drivers/tty/serial/samsung.c 	dma->tx_addr = dma_map_single(p->port.dev, p->port.state->xmit.buf,
dma               929 drivers/tty/serial/samsung.c 	if (dma_mapping_error(p->port.dev, dma->tx_addr)) {
dma               938 drivers/tty/serial/samsung.c 	dma_unmap_single(p->port.dev, dma->rx_addr, dma->rx_size,
dma               941 drivers/tty/serial/samsung.c 	kfree(dma->rx_buf);
dma               943 drivers/tty/serial/samsung.c 	dma_release_channel(dma->tx_chan);
dma               945 drivers/tty/serial/samsung.c 	dma_release_channel(dma->rx_chan);
dma               954 drivers/tty/serial/samsung.c 	struct s3c24xx_uart_dma	*dma = p->dma;
dma               956 drivers/tty/serial/samsung.c 	if (dma->rx_chan) {
dma               957 drivers/tty/serial/samsung.c 		dmaengine_terminate_all(dma->rx_chan);
dma               958 drivers/tty/serial/samsung.c 		dma_unmap_single(p->port.dev, dma->rx_addr,
dma               959 drivers/tty/serial/samsung.c 				dma->rx_size, DMA_FROM_DEVICE);
dma               960 drivers/tty/serial/samsung.c 		kfree(dma->rx_buf);
dma               961 drivers/tty/serial/samsung.c 		dma_release_channel(dma->rx_chan);
dma               962 drivers/tty/serial/samsung.c 		dma->rx_chan = NULL;
dma               965 drivers/tty/serial/samsung.c 	if (dma->tx_chan) {
dma               966 drivers/tty/serial/samsung.c 		dmaengine_terminate_all(dma->tx_chan);
dma               967 drivers/tty/serial/samsung.c 		dma_unmap_single(p->port.dev, dma->tx_addr,
dma               969 drivers/tty/serial/samsung.c 		dma_release_channel(dma->tx_chan);
dma               970 drivers/tty/serial/samsung.c 		dma->tx_chan = NULL;
dma              1001 drivers/tty/serial/samsung.c 	if (ourport->dma)
dma              1064 drivers/tty/serial/samsung.c 	if (ourport->dma) {
dma              1067 drivers/tty/serial/samsung.c 			devm_kfree(port->dev, ourport->dma);
dma              1068 drivers/tty/serial/samsung.c 			ourport->dma = NULL;
dma              1803 drivers/tty/serial/samsung.c 		ourport->dma = devm_kzalloc(port->dev,
dma              1804 drivers/tty/serial/samsung.c 					    sizeof(*ourport->dma),
dma              1806 drivers/tty/serial/samsung.c 		if (!ourport->dma) {
dma                96 drivers/tty/serial/samsung.h 	struct s3c24xx_uart_dma		*dma;
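The samsung.c request path above pairs dma_request_chan() with a dma_get_slave_caps() sanity check and a dmaengine_slave_config() call for each direction, releasing the channel again on any failure. A condensed single-direction sketch of that setup; the address width, burst size and the capability test used here are placeholders, not the driver's exact checks:

    #include <linux/bits.h>
    #include <linux/device.h>
    #include <linux/dmaengine.h>
    #include <linux/err.h>
    #include <linux/errno.h>

    static struct dma_chan *ex_acquire_rx(struct device *dev,
                                          dma_addr_t fifo_addr)
    {
        struct dma_slave_config conf = {
            .direction      = DMA_DEV_TO_MEM,
            .src_addr       = fifo_addr,
            .src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
            .src_maxburst   = 1,
        };
        struct dma_slave_caps caps;
        struct dma_chan *chan;
        int ret;

        chan = dma_request_chan(dev, "rx");
        if (IS_ERR(chan))
            return chan;

        /* Make sure the controller can do what we are about to configure. */
        ret = dma_get_slave_caps(chan, &caps);
        if (ret < 0 ||
            !(caps.src_addr_widths & BIT(DMA_SLAVE_BUSWIDTH_1_BYTE))) {
            dma_release_channel(chan);
            return ERR_PTR(ret ? ret : -EOPNOTSUPP);
        }

        ret = dmaengine_slave_config(chan, &conf);
        if (ret) {
            dma_release_channel(chan);
            return ERR_PTR(ret);
        }
        return chan;
    }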
dma              1617 drivers/tty/serial/sh-sci.c 		dma_addr_t dma;
dma              1622 drivers/tty/serial/sh-sci.c 					 &dma, GFP_KERNEL);
dma              1635 drivers/tty/serial/sh-sci.c 			sg_dma_address(sg) = dma;
dma              1639 drivers/tty/serial/sh-sci.c 			dma += s->buf_len_rx;
dma               863 drivers/tty/synclink.c static int dma[MAX_ISA_DEVICES];
dma               873 drivers/tty/synclink.c module_param_hw_array(dma, int, dma, NULL, 0);
dma              7936 drivers/tty/synclink.c 	dev->dma       = info->dma_level;
dma               770 drivers/usb/cdns3/ep0.c 	cdns3_ep0_run_transfer(priv_dev, request->dma, request->length, 1, zlp);
dma               713 drivers/usb/cdns3/gadget.c 					  buf->buf, buf->dma);
dma               743 drivers/usb/cdns3/gadget.c 					      &buf->dma,
dma               847 drivers/usb/cdns3/gadget.c 		trb_dma = priv_req->aligned_buf->dma;
dma               849 drivers/usb/cdns3/gadget.c 		trb_dma = request->dma;
dma              2563 drivers/usb/cdns3/gadget.c 				  buf->dma);
dma              1179 drivers/usb/cdns3/gadget.h 	dma_addr_t		dma;
dma               291 drivers/usb/cdns3/trace.h 		__field(dma_addr_t, dma)
dma               300 drivers/usb/cdns3/trace.h 		__entry->dma = priv_req->request.dma;
dma               302 drivers/usb/cdns3/trace.h 		__entry->aligned_dma = priv_req->aligned_buf->dma;
dma               306 drivers/usb/cdns3/trace.h 		__get_str(name), __entry->req, __entry->buf, &__entry->dma,
dma                95 drivers/usb/chipidea/ci.h 		dma_addr_t		dma;
dma               138 drivers/usb/chipidea/debug.c 			   i, (u32)hweprx->qh.dma, (u32)hweptx->qh.dma);
dma               172 drivers/usb/chipidea/debug.c 					   (u32)node->dma,
dma                79 drivers/usb/chipidea/udc.c static int hw_device_state(struct ci_hdrc *ci, u32 dma)
dma                81 drivers/usb/chipidea/udc.c 	if (dma) {
dma                82 drivers/usb/chipidea/udc.c 		hw_write(ci, OP_ENDPTLISTADDR, ~0, dma);
dma               351 drivers/usb/chipidea/udc.c 	node->ptr = dma_pool_zalloc(hwep->td_pool, GFP_ATOMIC, &node->dma);
dma               369 drivers/usb/chipidea/udc.c 	temp = (u32) (hwreq->req.dma + hwreq->req.actual);
dma               385 drivers/usb/chipidea/udc.c 		lastnode->ptr->next = cpu_to_le32(node->dma);
dma               433 drivers/usb/chipidea/udc.c 	if (hwreq->req.dma % PAGE_SIZE)
dma               475 drivers/usb/chipidea/udc.c 		u32 next = firstnode->dma & TD_ADDR_MASK;
dma               496 drivers/usb/chipidea/udc.c 	hwep->qh.ptr->td.next = cpu_to_le32(firstnode->dma);
dma               523 drivers/usb/chipidea/udc.c 	dma_pool_free(hwep->td_pool, pending->ptr, pending->dma);
dma               531 drivers/usb/chipidea/udc.c 	hwep->qh.ptr->td.next = cpu_to_le32(node->dma);
dma               640 drivers/usb/chipidea/udc.c 			dma_pool_free(hwep->td_pool, node->ptr, node->dma);
dma              1372 drivers/usb/chipidea/udc.c 		dma_pool_free(hwep->td_pool, node->ptr, node->dma);
dma              1430 drivers/usb/chipidea/udc.c 		dma_pool_free(hwep->td_pool, node->ptr, node->dma);
dma              1547 drivers/usb/chipidea/udc.c 			hw_device_state(ci, ci->ep0out->qh.dma);
dma              1717 drivers/usb/chipidea/udc.c 						       &hwep->qh.dma);
dma              1750 drivers/usb/chipidea/udc.c 		dma_pool_free(ci->qh_pool, hwep->qh.ptr, hwep->qh.dma);
dma              1797 drivers/usb/chipidea/udc.c 	retval = hw_device_state(ci, ci->ep0out->qh.dma);
dma                62 drivers/usb/chipidea/udc.h 	dma_addr_t		dma;
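The chipidea udc lines above pair every TD with both a CPU pointer and a dma_pool handle, and chain TDs by writing the next node's bus address into the previous node's hardware "next" field. Here is an illustrative sketch of that idiom with stand-in types (example_td/example_node are assumptions, not the driver's structures).

#include <linux/dmapool.h>
#include <linux/gfp.h>
#include <linux/errno.h>

struct example_td {			/* stand-in hardware descriptor */
	__le32	next;
	__le32	token;
};

struct example_node {
	struct example_td	*ptr;	/* CPU view */
	dma_addr_t		dma;	/* device view */
};

static int example_link_node(struct dma_pool *td_pool,
			     struct example_node *prev,
			     struct example_node *node)
{
	node->ptr = dma_pool_zalloc(td_pool, GFP_ATOMIC, &node->dma);
	if (!node->ptr)
		return -ENOMEM;
	if (prev)				/* hardware walks bus addresses, not pointers */
		prev->ptr->next = cpu_to_le32(node->dma);
	return 0;
}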
dma              1122 drivers/usb/class/cdc-acm.c 			  acm->read_buffers[i].base, acm->read_buffers[i].dma);
dma              1393 drivers/usb/class/cdc-acm.c 								&rb->dma);
dma              1404 drivers/usb/class/cdc-acm.c 		urb->transfer_dma = rb->dma;
dma                78 drivers/usb/class/cdc-acm.h 	dma_addr_t		dma;
dma               117 drivers/usb/core/buffer.c 	dma_addr_t		*dma
dma               127 drivers/usb/core/buffer.c 		return gen_pool_dma_alloc(hcd->localmem_pool, size, dma);
dma               131 drivers/usb/core/buffer.c 		*dma = ~(dma_addr_t) 0;
dma               137 drivers/usb/core/buffer.c 			return dma_pool_alloc(hcd->pool[i], mem_flags, dma);
dma               139 drivers/usb/core/buffer.c 	return dma_alloc_coherent(hcd->self.sysdev, size, dma, mem_flags);
dma               146 drivers/usb/core/buffer.c 	dma_addr_t		dma
dma               167 drivers/usb/core/buffer.c 			dma_pool_free(hcd->pool[i], addr, dma);
dma               171 drivers/usb/core/buffer.c 	dma_free_coherent(hcd->self.sysdev, size, addr, dma);
dma              2922 drivers/usb/core/hcd.c 			    dma_addr_t dma, size_t size)
dma              2944 drivers/usb/core/hcd.c 				dma, size, dev_to_node(hcd->self.sysdev));
dma               906 drivers/usb/core/usb.c 			 dma_addr_t *dma)
dma               910 drivers/usb/core/usb.c 	return hcd_buffer_alloc(dev->bus, size, mem_flags, dma);
dma               926 drivers/usb/core/usb.c 		       dma_addr_t dma)
dma               932 drivers/usb/core/usb.c 	hcd_buffer_free(dev->bus, size, addr, dma);
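usb_alloc_coherent()/usb_free_coherent(), declared in the usb core lines above, hand back both a CPU buffer and its DMA handle; a hedged usage sketch follows (the pipe, size and the omitted completion handler are illustrative, not from any listed driver).

#include <linux/usb.h>

static int example_submit_bulk_in(struct usb_device *udev, struct urb *urb,
				  unsigned int pipe, size_t size)
{
	dma_addr_t dma;
	void *buf = usb_alloc_coherent(udev, size, GFP_KERNEL, &dma);

	if (!buf)
		return -ENOMEM;

	/* completion callback omitted in this sketch */
	usb_fill_bulk_urb(urb, udev, pipe, buf, size, NULL, NULL);
	urb->transfer_dma = dma;
	urb->transfer_flags |= URB_NO_TRANSFER_DMA_MAP;	/* buffer is already DMA-able */

	return usb_submit_urb(urb, GFP_KERNEL);
}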
dma               979 drivers/usb/dwc2/gadget.c 		dma_addr_t dma_addr = hs_req->req.dma;
dma              1136 drivers/usb/dwc2/gadget.c 		dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, ureq->dma + offset,
dma              1154 drivers/usb/dwc2/gadget.c 			dwc2_writel(hsotg, ureq->dma, dma_reg);
dma              1157 drivers/usb/dwc2/gadget.c 				__func__, &ureq->dma, dma_reg);
dma              1442 drivers/usb/dwc2/gadget.c 			dma_addr_t dma_addr = hs_req->req.dma;
dma              2050 drivers/usb/dwc2/gadget.c 		dma_addr_t dma = hs_ep->desc_list_dma;
dma              2055 drivers/usb/dwc2/gadget.c 		dwc2_gadget_config_nonisoc_xfer_ddma(hs_ep, dma, 0);
dma              2422 drivers/usb/dwc2/hcd.c 			chan->xfer_dma = urb->dma;
dma              2645 drivers/usb/dwc2/hcd.c 		chan->xfer_dma = urb->dma + urb->actual_length;
dma              3926 drivers/usb/dwc2/hcd.c 					(unsigned long)urb->dma);
dma              4628 drivers/usb/dwc2/hcd.c 	dwc2_urb->dma = urb->transfer_dma;
dma               191 drivers/usb/dwc2/hcd.h 	dma_addr_t dma;
dma               557 drivers/usb/dwc2/hcd_ddma.c 	dma_desc->buf = (u32)(qtd->urb->dma + frame_desc->offset);
dma               763 drivers/usb/dwc2/hcd_ddma.c 			chan->xfer_dma = qtd->urb->dma +
dma               913 drivers/usb/dwc2/hcd_ddma.c 	dma_desc->buf = (u32)(qtd->urb->dma + frame_desc->offset);
dma               956 drivers/usb/dwc2/hcd_intr.c 		memcpy(qtd->urb->buf + (chan->xfer_dma - qtd->urb->dma),
dma              1625 drivers/usb/dwc2/hcd_intr.c 		urb->buf, (unsigned long)urb->dma);
dma               330 drivers/usb/dwc3/core.c 	dma_free_coherent(dwc->sysdev, evt->length, evt->buf, evt->dma);
dma               357 drivers/usb/dwc3/core.c 			&evt->dma, GFP_KERNEL);
dma               412 drivers/usb/dwc3/core.c 			lower_32_bits(evt->dma));
dma               414 drivers/usb/dwc3/core.c 			upper_32_bits(evt->dma));
dma               638 drivers/usb/dwc3/core.h 	dma_addr_t		dma;
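The dwc3 lines above allocate the event buffer with dma_alloc_coherent() and program its 64-bit bus address as two 32-bit register writes; a sketch of that split is below, with placeholder register offsets (EXAMPLE_EVT_ADRLO/HI are not dwc3's names).

#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/kernel.h>

#define EXAMPLE_EVT_ADRLO	0x00	/* placeholder offsets */
#define EXAMPLE_EVT_ADRHI	0x04

static void *example_setup_evt_buf(struct device *dev, void __iomem *regs,
				   size_t length, dma_addr_t *dma)
{
	void *buf = dma_alloc_coherent(dev, length, dma, GFP_KERNEL);

	if (!buf)
		return NULL;
	writel(lower_32_bits(*dma), regs + EXAMPLE_EVT_ADRLO);
	writel(upper_32_bits(*dma), regs + EXAMPLE_EVT_ADRHI);
	return buf;
}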
dma               968 drivers/usb/dwc3/ep0.c 		dwc3_ep0_prepare_one_trb(dep, req->request.dma,
dma               990 drivers/usb/dwc3/ep0.c 		dwc3_ep0_prepare_one_trb(dep, req->request.dma,
dma              1008 drivers/usb/dwc3/ep0.c 		dwc3_ep0_prepare_one_trb(dep, req->request.dma,
dma               911 drivers/usb/dwc3/gadget.c 		dma_addr_t dma, unsigned length, unsigned chain, unsigned node,
dma               919 drivers/usb/dwc3/gadget.c 	trb->bpl = lower_32_bits(dma);
dma               920 drivers/usb/dwc3/gadget.c 	trb->bph = upper_32_bits(dma);
dma              1028 drivers/usb/dwc3/gadget.c 	dma_addr_t		dma;
dma              1035 drivers/usb/dwc3/gadget.c 		dma = sg_dma_address(req->start_sg);
dma              1038 drivers/usb/dwc3/gadget.c 		dma = req->request.dma;
dma              1051 drivers/usb/dwc3/gadget.c 	__dwc3_prepare_one_trb(dep, trb, dma, length, chain, node,
dma               178 drivers/usb/early/xhci-dbc.c 	seg->trbs = xdbc_get_page(&seg->dma);
dma               194 drivers/usb/early/xhci-dbc.c 	memblock_free(seg->dma, PAGE_SIZE);
dma               211 drivers/usb/early/xhci-dbc.c 		link_trb->field[0] = cpu_to_le32(lower_32_bits(seg->dma));
dma               212 drivers/usb/early/xhci-dbc.c 		link_trb->field[1] = cpu_to_le32(upper_32_bits(seg->dma));
dma               251 drivers/usb/early/xhci-dbc.c 	entry->seg_addr		= cpu_to_le64(xdbc.evt_seg.dma);
dma               258 drivers/usb/early/xhci-dbc.c 	xdbc_write64(xdbc.evt_seg.dma, &xdbc.xdbc_reg->erdp);
dma               325 drivers/usb/early/xhci-dbc.c 	ep_out->deq		= cpu_to_le64(xdbc.out_seg.dma | xdbc.out_ring.cycle_state);
dma               332 drivers/usb/early/xhci-dbc.c 	ep_in->deq		= cpu_to_le64(xdbc.in_seg.dma | xdbc.in_ring.cycle_state);
dma               111 drivers/usb/early/xhci-dbc.h 	dma_addr_t		dma;
dma               501 drivers/usb/gadget/udc/amd5536udc.h 	u32 __iomem			*dma;
dma                47 drivers/usb/gadget/udc/aspeed-vhub/core.c 	if (req->req.dma) {
dma                51 drivers/usb/gadget/udc/aspeed-vhub/core.c 		req->req.dma = 0;
dma                66 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	if (!req->req.dma) {
dma                77 drivers/usb/gadget/udc/aspeed-vhub/epn.c 		writel(req->req.dma + act, ep->epn.regs + AST_VHUB_EP_DESC_BASE);
dma               127 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	if (!req->req.dma && !ep->epn.is_in && len)
dma               215 drivers/usb/gadget/udc/aspeed-vhub/epn.c 		desc->w0 = cpu_to_le32(req->req.dma + act);
dma               387 drivers/usb/gadget/udc/aspeed-vhub/epn.c 		u_req->dma = 0;
dma               391 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	       u_req->length, (u32)u_req->dma, u_req->zero,
dma               446 drivers/usb/gadget/udc/atmel_usba_udc.c 		usba_dma_writel(ep, ADDRESS, req->req.dma);
dma               738 drivers/usb/gadget/udc/atmel_usba_udc.c 		ep->ep.name, req->req.length, &req->req.dma,
dma               648 drivers/usb/gadget/udc/bcm63xx_udc.c 		d->address = breq->req.dma + breq->offset;
dma               292 drivers/usb/gadget/udc/bdc/bdc.h 	dma_addr_t dma;
dma               145 drivers/usb/gadget/udc/bdc/bdc_cmd.c 	param0 = lower_32_bits(ep->bd_list.bd_table_array[0]->dma);
dma               146 drivers/usb/gadget/udc/bdc/bdc_cmd.c 	param1 = upper_32_bits(ep->bd_list.bd_table_array[0]->dma);
dma                91 drivers/usb/gadget/udc/bdc/bdc_dbg.c 	dma_addr_t dma;
dma               107 drivers/usb/gadget/udc/bdc/bdc_dbg.c 			dma = bd_table->dma + (sizeof(struct bdc_bd) * bdi);
dma               110 drivers/usb/gadget/udc/bdc/bdc_dbg.c 				tbi, bdi, gbdi++, bd, (unsigned long long)dma,
dma                86 drivers/usb/gadget/udc/bdc/bdc_ep.c 				(unsigned long long)bd_table->dma);
dma                90 drivers/usb/gadget/udc/bdc/bdc_ep.c 				bd_table->dma);
dma               108 drivers/usb/gadget/udc/bdc/bdc_ep.c 				cpu_to_le32(lower_32_bits(next_table->dma));
dma               111 drivers/usb/gadget/udc/bdc/bdc_ep.c 				cpu_to_le32(upper_32_bits(next_table->dma));
dma               127 drivers/usb/gadget/udc/bdc/bdc_ep.c 	dma_addr_t dma;
dma               156 drivers/usb/gadget/udc/bdc/bdc_ep.c 							&dma);
dma               162 drivers/usb/gadget/udc/bdc/bdc_ep.c 		bd_table->dma = dma;
dma               167 drivers/usb/gadget/udc/bdc/bdc_ep.c 			(unsigned long long)bd_table->dma, prev_table);
dma               234 drivers/usb/gadget/udc/bdc/bdc_ep.c 		dma_first_bd = bd_table->dma;
dma               235 drivers/usb/gadget/udc/bdc/bdc_ep.c 		dma_last_bd = bd_table->dma +
dma               427 drivers/usb/gadget/udc/bdc/bdc_ep.c 	dma_addr_t buf_add = req->usb_req.dma;
dma               822 drivers/usb/gadget/udc/bdc/bdc_ep.c 	next_bd_dma =  table->dma +
dma              1832 drivers/usb/gadget/udc/bdc/bdc_ep.c 	req->usb_req.dma = DMA_ADDR_INVALID;
dma               840 drivers/usb/gadget/udc/core.c 		req->dma = dma_map_single(dev, req->buf, req->length,
dma               843 drivers/usb/gadget/udc/core.c 		if (dma_mapping_error(dev, req->dma)) {
dma               874 drivers/usb/gadget/udc/core.c 		dma_unmap_single(dev, req->dma, req->length,
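The udc-core lines above are the generic streaming map of a request buffer; the minimal pattern, including the mandatory dma_mapping_error() check, looks like this (flat buffer assumed, no scatter/gather).

#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int example_map_buf(struct device *dev, void *buf, size_t len,
			   bool is_in, dma_addr_t *dma)
{
	*dma = dma_map_single(dev, buf, len,
			      is_in ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, *dma))
		return -EFAULT;
	return 0;
}

static void example_unmap_buf(struct device *dev, dma_addr_t dma, size_t len,
			      bool is_in)
{
	dma_unmap_single(dev, dma, len,
			 is_in ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
}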
dma                95 drivers/usb/gadget/udc/fsl_qe_udc.c 			req->req.dma, req->req.length,
dma                99 drivers/usb/gadget/udc/fsl_qe_udc.c 		req->req.dma = DMA_ADDR_INVALID;
dma               103 drivers/usb/gadget/udc/fsl_qe_udc.c 			req->req.dma, req->req.length,
dma              1670 drivers/usb/gadget/udc/fsl_qe_udc.c 	req->req.dma = DMA_ADDR_INVALID;
dma              1712 drivers/usb/gadget/udc/fsl_qe_udc.c 	if (req->req.dma == DMA_ADDR_INVALID) {
dma              1713 drivers/usb/gadget/udc/fsl_qe_udc.c 		req->req.dma = dma_map_single(ep->udc->gadget.dev.parent,
dma              1722 drivers/usb/gadget/udc/fsl_qe_udc.c 					req->req.dma, req->req.length,
dma               685 drivers/usb/gadget/udc/fsl_udc_core.c 	req->req.dma = DMA_ADDR_INVALID;
dma               775 drivers/usb/gadget/udc/fsl_udc_core.c 		dma_addr_t *dma, int *is_last, gfp_t gfp_flags)
dma               784 drivers/usb/gadget/udc/fsl_udc_core.c 	dtd = dma_pool_alloc(udc_controller->td_pool, gfp_flags, dma);
dma               788 drivers/usb/gadget/udc/fsl_udc_core.c 	dtd->td_dma = *dma;
dma               795 drivers/usb/gadget/udc/fsl_udc_core.c 	swap_temp = (u32) (req->req.dma + req->req.actual);
dma               828 drivers/usb/gadget/udc/fsl_udc_core.c 	VDBG("length = %d address= 0x%x", *length, (int)*dma);
dma               840 drivers/usb/gadget/udc/fsl_udc_core.c 	dma_addr_t dma;
dma               843 drivers/usb/gadget/udc/fsl_udc_core.c 		dtd = fsl_build_dtd(req, &count, &dma, &is_last, gfp_flags);
dma               851 drivers/usb/gadget/udc/fsl_udc_core.c 			last_dtd->next_td_ptr = cpu_to_hc32(dma);
dma               951 drivers/usb/gadget/udc/fusb300_udc.c 	fusb300_fill_idma_prdtbl(ep, req->req.dma, req->req.length);
dma               147 drivers/usb/gadget/udc/goku_udc.c 		ep->dma = (use_dma != 0) && (ep->num == UDC_MSTRD_ENDPOINT);
dma               149 drivers/usb/gadget/udc/goku_udc.c 		ep->dma = (use_dma == 2) && (ep->num == UDC_MSTWR_ENDPOINT);
dma               150 drivers/usb/gadget/udc/goku_udc.c 		if (ep->dma)
dma               163 drivers/usb/gadget/udc/goku_udc.c 		tmp = ((ep->dma || !ep->is_in)
dma               170 drivers/usb/gadget/udc/goku_udc.c 		tmp = (ep->dma ? 0x10/*dma*/ : 0x11/*pio*/) << ep->num;
dma               183 drivers/usb/gadget/udc/goku_udc.c 		ep->dma ? "dma" : "pio",
dma               219 drivers/usb/gadget/udc/goku_udc.c 		if (ep->dma) {
dma               238 drivers/usb/gadget/udc/goku_udc.c 	ep->dma = 0;
dma               311 drivers/usb/gadget/udc/goku_udc.c 	if (ep->dma)
dma               536 drivers/usb/gadget/udc/goku_udc.c 	u32				start = req->req.dma;
dma               620 drivers/usb/gadget/udc/goku_udc.c 	req->req.actual -= req->req.dma;
dma               687 drivers/usb/gadget/udc/goku_udc.c 	req->req.actual = (curr - req->req.dma) + 1;
dma               733 drivers/usb/gadget/udc/goku_udc.c 	if (ep->dma) {
dma               762 drivers/usb/gadget/udc/goku_udc.c 		if (ep->dma)
dma               780 drivers/usb/gadget/udc/goku_udc.c 			&& !ep->dma
dma               798 drivers/usb/gadget/udc/goku_udc.c 	if (ep->dma)
dma               827 drivers/usb/gadget/udc/goku_udc.c 		ep->dma ? "dma" : "pio",
dma               842 drivers/usb/gadget/udc/goku_udc.c 	if (ep->dma && ep->queue.next == &req->queue && !ep->stopped) {
dma               865 drivers/usb/gadget/udc/goku_udc.c 		if (ep->dma) {
dma              1205 drivers/usb/gadget/udc/goku_udc.c 			   ep->dma ? "dma" : "pio",
dma              1222 drivers/usb/gadget/udc/goku_udc.c 			if (ep->dma && req->queue.prev == &ep->queue) {
dma              1227 drivers/usb/gadget/udc/goku_udc.c 				tmp -= req->req.dma;
dma               210 drivers/usb/gadget/udc/goku_udc.h 						dma:1,
dma               486 drivers/usb/gadget/udc/gr_udc.c 		dma_addr_t start = req->req.dma + bytes_used;
dma               539 drivers/usb/gadget/udc/gr_udc.c 		dma_addr_t start = req->req.dma + bytes_used;
dma               934 drivers/usb/gadget/udc/lpc32xx_udc.c 	dma_addr_t			dma;
dma               937 drivers/usb/gadget/udc/lpc32xx_udc.c 	dd = dma_pool_alloc(udc->dd_cache, GFP_ATOMIC | GFP_DMA, &dma);
dma               939 drivers/usb/gadget/udc/lpc32xx_udc.c 		dd->this_dma = dma;
dma              1783 drivers/usb/gadget/udc/lpc32xx_udc.c 		dd->dd_buffer_addr = req->req.dma;
dma               326 drivers/usb/gadget/udc/m66592-udc.c 		u16 pipenum, int dma)
dma               328 drivers/usb/gadget/udc/m66592-udc.c 	if ((pipenum != 0) && dma) {
dma               386 drivers/usb/gadget/udc/m66592-udc.c 	int dma = 0;
dma               410 drivers/usb/gadget/udc/m66592-udc.c 		dma = 1;
dma               454 drivers/usb/gadget/udc/m66592-udc.c 	m66592_ep_setting(m66592, ep, desc, info.pipe, dma);
dma               269 drivers/usb/gadget/udc/mv_u3d_core.c 				unsigned *length, dma_addr_t *dma)
dma               292 drivers/usb/gadget/udc/mv_u3d_core.c 	trb_hw = dma_pool_alloc(u3d->trb_pool, GFP_ATOMIC, dma);
dma               299 drivers/usb/gadget/udc/mv_u3d_core.c 	trb->trb_dma = *dma;
dma               303 drivers/usb/gadget/udc/mv_u3d_core.c 	temp = (u32)(req->req.dma + req->req.actual);
dma               349 drivers/usb/gadget/udc/mv_u3d_core.c 	temp = (u32)(req->req.dma + req->req.actual);
dma               407 drivers/usb/gadget/udc/mv_u3d_core.c 	dma_addr_t dma;
dma               420 drivers/usb/gadget/udc/mv_u3d_core.c 		trb = mv_u3d_build_trb_one(req, &count, &dma);
dma              1911 drivers/usb/gadget/udc/mv_u3d_core.c 	u3d->status_req->req.dma = virt_to_phys(u3d->status_req->req.buf);
dma               336 drivers/usb/gadget/udc/mv_udc_core.c 		dma_addr_t *dma, int *is_last)
dma               360 drivers/usb/gadget/udc/mv_udc_core.c 	dtd = dma_pool_alloc(udc->dtd_pool, GFP_ATOMIC, dma);
dma               364 drivers/usb/gadget/udc/mv_udc_core.c 	dtd->td_dma = *dma;
dma               366 drivers/usb/gadget/udc/mv_udc_core.c 	temp = (u32)(req->req.dma + req->req.actual);
dma               409 drivers/usb/gadget/udc/mv_udc_core.c 	dma_addr_t dma;
dma               412 drivers/usb/gadget/udc/mv_udc_core.c 		dtd = build_dtd(req, &count, &dma, &is_last);
dma               420 drivers/usb/gadget/udc/mv_udc_core.c 			last_dtd->dtd_next = dma;
dma               605 drivers/usb/gadget/udc/mv_udc_core.c 	req->req.dma = DMA_ADDR_INVALID;
dma              1472 drivers/usb/gadget/udc/mv_udc_core.c 	if (req->req.dma == DMA_ADDR_INVALID) {
dma              1473 drivers/usb/gadget/udc/mv_udc_core.c 		req->req.dma = dma_map_single(ep->udc->gadget.dev.parent,
dma              2235 drivers/usb/gadget/udc/mv_udc_core.c 	udc->status_req->req.dma = DMA_ADDR_INVALID;
dma               368 drivers/usb/gadget/udc/net2272.c 	if (use_dma && ep->dma)
dma               707 drivers/usb/gadget/udc/net2272.c 	if (!use_dma || (ep->num < 1) || (ep->num > 2) || !ep->dma)
dma               717 drivers/usb/gadget/udc/net2272.c 		ep->ep.name, req, (unsigned long long) req->req.dma);
dma               734 drivers/usb/gadget/udc/net2272.c 		if (net2272_request_dma(ep->dev, ep->num, req->req.dma, size, 0))
dma               744 drivers/usb/gadget/udc/net2272.c 		if (net2272_request_dma(ep->dev, ep->num, req->req.dma, size, 1))
dma               829 drivers/usb/gadget/udc/net2272.c 	if (use_dma && ep->dma) {
dma               838 drivers/usb/gadget/udc/net2272.c 		(unsigned long long) _req->dma, _req->zero ? "zero" : "!zero");
dma              1378 drivers/usb/gadget/udc/net2272.c 			ep->dma = 1;
dma              1529 drivers/usb/gadget/udc/net2272.c 	      | (ep->dma << DMA_ENDPOINT_SELECT));
dma               433 drivers/usb/gadget/udc/net2272.h 	         dma:1,
dma               320 drivers/usb/gadget/udc/net2280.c 	if (!ep->dma) {				/* pio, per-packet */
dma               349 drivers/usb/gadget/udc/net2280.c 		ep->dma ? "dma" : "pio", max);
dma               391 drivers/usb/gadget/udc/net2280.c 	if (ep->dma) {
dma               392 drivers/usb/gadget/udc/net2280.c 		writel(0, &ep->dma->dmactl);
dma               396 drivers/usb/gadget/udc/net2280.c 			&ep->dma->dmastat);
dma               467 drivers/usb/gadget/udc/net2280.c 	if (ep->dma) {
dma               468 drivers/usb/gadget/udc/net2280.c 		writel(0, &ep->dma->dmactl);
dma               474 drivers/usb/gadget/udc/net2280.c 		       &ep->dma->dmastat);
dma               476 drivers/usb/gadget/udc/net2280.c 		dmastat = readl(&ep->dma->dmastat);
dma               480 drivers/usb/gadget/udc/net2280.c 			writel(0x5a, &ep->dma->dmastat);
dma               532 drivers/usb/gadget/udc/net2280.c 			ep->dma ? "dma" : "pio", _ep->name);
dma               537 drivers/usb/gadget/udc/net2280.c 	if (!ep->dma && ep->num >= 1 && ep->num <= 4)
dma               538 drivers/usb/gadget/udc/net2280.c 		ep->dma = &ep->dev->dma[ep->num - 1];
dma               565 drivers/usb/gadget/udc/net2280.c 	if (ep->dma) {
dma               819 drivers/usb/gadget/udc/net2280.c 	td->dmaaddr = cpu_to_le32 (req->req.dma);
dma               837 drivers/usb/gadget/udc/net2280.c static inline void spin_stop_dma(struct net2280_dma_regs __iomem *dma)
dma               839 drivers/usb/gadget/udc/net2280.c 	handshake(&dma->dmactl, BIT(DMA_ENABLE), 0, 50);
dma               842 drivers/usb/gadget/udc/net2280.c static inline void stop_dma(struct net2280_dma_regs __iomem *dma)
dma               844 drivers/usb/gadget/udc/net2280.c 	writel(readl(&dma->dmactl) & ~BIT(DMA_ENABLE), &dma->dmactl);
dma               845 drivers/usb/gadget/udc/net2280.c 	spin_stop_dma(dma);
dma               850 drivers/usb/gadget/udc/net2280.c 	struct net2280_dma_regs	__iomem *dma = ep->dma;
dma               856 drivers/usb/gadget/udc/net2280.c 	writel(tmp, &dma->dmacount);
dma               857 drivers/usb/gadget/udc/net2280.c 	writel(readl(&dma->dmastat), &dma->dmastat);
dma               859 drivers/usb/gadget/udc/net2280.c 	writel(td_dma, &dma->dmadesc);
dma               862 drivers/usb/gadget/udc/net2280.c 	writel(dmactl, &dma->dmactl);
dma               867 drivers/usb/gadget/udc/net2280.c 	writel(BIT(DMA_START), &dma->dmastat);
dma               873 drivers/usb/gadget/udc/net2280.c 	struct net2280_dma_regs	__iomem *dma = ep->dma;
dma               878 drivers/usb/gadget/udc/net2280.c 	WARN_ON(readl(&dma->dmactl) & BIT(DMA_ENABLE));
dma               879 drivers/usb/gadget/udc/net2280.c 	writel(0, &ep->dma->dmactl);
dma               889 drivers/usb/gadget/udc/net2280.c 			writel(readl(&dma->dmastat), &dma->dmastat);
dma               892 drivers/usb/gadget/udc/net2280.c 			writel(req->req.dma, &dma->dmaaddr);
dma               898 drivers/usb/gadget/udc/net2280.c 					&dma->dmacount);
dma               902 drivers/usb/gadget/udc/net2280.c 			writel(BIT(DMA_ENABLE), &dma->dmactl);
dma               903 drivers/usb/gadget/udc/net2280.c 			writel(BIT(DMA_START), &dma->dmastat);
dma               967 drivers/usb/gadget/udc/net2280.c 	if (ep->dma)
dma              1019 drivers/usb/gadget/udc/net2280.c 	if (ep->dma && _req->length == 0) {
dma              1025 drivers/usb/gadget/udc/net2280.c 	if (ep->dma) {
dma              1042 drivers/usb/gadget/udc/net2280.c 		!((dev->quirks & PLX_PCIE) && ep->dma &&
dma              1046 drivers/usb/gadget/udc/net2280.c 		if (ep->dma)
dma              1093 drivers/usb/gadget/udc/net2280.c 	} else if (ep->dma) {
dma              1159 drivers/usb/gadget/udc/net2280.c 			u32 const ep_dmacount = readl(&ep->dma->dmacount);
dma              1216 drivers/usb/gadget/udc/net2280.c 		writel(BIT(DMA_ABORT), &ep->dma->dmastat);
dma              1217 drivers/usb/gadget/udc/net2280.c 		spin_stop_dma(ep->dma);
dma              1219 drivers/usb/gadget/udc/net2280.c 		stop_dma(ep->dma);
dma              1230 drivers/usb/gadget/udc/net2280.c 	if (ep->dma)
dma              1262 drivers/usb/gadget/udc/net2280.c 	if (ep->dma) {
dma              1263 drivers/usb/gadget/udc/net2280.c 		dmactl = readl(&ep->dma->dmactl);
dma              1265 drivers/usb/gadget/udc/net2280.c 		stop_dma(ep->dma);
dma              1283 drivers/usb/gadget/udc/net2280.c 		if (ep->dma) {
dma              1291 drivers/usb/gadget/udc/net2280.c 					readl(&ep->dma->dmacount),
dma              1305 drivers/usb/gadget/udc/net2280.c 	if (ep->dma) {
dma              1308 drivers/usb/gadget/udc/net2280.c 			stop_dma(ep->dma);
dma              1312 drivers/usb/gadget/udc/net2280.c 				writel(dmactl, &ep->dma->dmactl);
dma              1762 drivers/usb/gadget/udc/net2280.c 		if (!ep->dma)
dma              1768 drivers/usb/gadget/udc/net2280.c 				readl(&ep->dma->dmactl),
dma              1769 drivers/usb/gadget/udc/net2280.c 				readl(&ep->dma->dmastat),
dma              1770 drivers/usb/gadget/udc/net2280.c 				readl(&ep->dma->dmacount),
dma              1771 drivers/usb/gadget/udc/net2280.c 				readl(&ep->dma->dmaaddr),
dma              1772 drivers/usb/gadget/udc/net2280.c 				readl(&ep->dma->dmadesc));
dma              1837 drivers/usb/gadget/udc/net2280.c 				ep->dma ? "dma" : "pio", ep->fifo_size
dma              1856 drivers/usb/gadget/udc/net2280.c 			if (ep->dma && req->td_dma == readl(&ep->dma->dmadesc))
dma              1862 drivers/usb/gadget/udc/net2280.c 					readl(&ep->dma->dmacount));
dma              1873 drivers/usb/gadget/udc/net2280.c 			if (ep->dma) {
dma              2078 drivers/usb/gadget/udc/net2280.c 		if (ep->dma)
dma              2116 drivers/usb/gadget/udc/net2280.c 		struct net2280_dma_regs __iomem *dma;
dma              2118 drivers/usb/gadget/udc/net2280.c 		if (ep->dma) {
dma              2121 drivers/usb/gadget/udc/net2280.c 			dma = &dev->dma[tmp];
dma              2122 drivers/usb/gadget/udc/net2280.c 			writel(BIT(DMA_ABORT), &dma->dmastat);
dma              2123 drivers/usb/gadget/udc/net2280.c 			writel(0, &dma->dmactl);
dma              2170 drivers/usb/gadget/udc/net2280.c 			ep->dma = &dev->dma[tmp - 1];
dma              2212 drivers/usb/gadget/udc/net2280.c 			ep->dma = &dev->dma[i - 1];
dma              2596 drivers/usb/gadget/udc/net2280.c 	if (likely(ep->dma)) {
dma              2627 drivers/usb/gadget/udc/net2280.c 					count = readl(&ep->dma->dmacount);
dma              2629 drivers/usb/gadget/udc/net2280.c 					if (readl(&ep->dma->dmadesc)
dma              2640 drivers/usb/gadget/udc/net2280.c 					    readl(&ep->dma->dmadesc) !=
dma              2643 drivers/usb/gadget/udc/net2280.c 							&ep->dma->dmacount);
dma              2663 drivers/usb/gadget/udc/net2280.c 			writel(BIT(DMA_ABORT), &ep->dma->dmastat);
dma              2664 drivers/usb/gadget/udc/net2280.c 			spin_stop_dma(ep->dma);
dma              3026 drivers/usb/gadget/udc/net2280.c 				if (ep->dma)
dma              3274 drivers/usb/gadget/udc/net2280.c 			if ((dev->quirks & PLX_PCIE) && e->dma)
dma              3462 drivers/usb/gadget/udc/net2280.c 		struct net2280_dma_regs	__iomem *dma;
dma              3470 drivers/usb/gadget/udc/net2280.c 		dma = ep->dma;
dma              3472 drivers/usb/gadget/udc/net2280.c 		if (!dma)
dma              3476 drivers/usb/gadget/udc/net2280.c 		tmp = readl(&dma->dmastat);
dma              3477 drivers/usb/gadget/udc/net2280.c 		writel(tmp, &dma->dmastat);
dma              3481 drivers/usb/gadget/udc/net2280.c 			u32 r_dmacount = readl(&dma->dmacount);
dma              3492 drivers/usb/gadget/udc/net2280.c 		stop_dma(ep->dma);
dma              3508 drivers/usb/gadget/udc/net2280.c 			tmp = readl(&dma->dmactl);
dma              3671 drivers/usb/gadget/udc/net2280.c 	dev->dma = (struct net2280_dma_regs __iomem *) (base + 0x0180);
dma                94 drivers/usb/gadget/udc/net2280.h 	struct net2280_dma_regs			__iomem *dma;
dma               177 drivers/usb/gadget/udc/net2280.h 	struct net2280_dma_regs		__iomem *dma;
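The net2280 lines above drive a descriptor-mode DMA engine through dmadesc/dmacount/dmastat/dmactl MMIO registers. The kick-off sequence, sketched here with a placeholder register block and made-up bit positions, is roughly: ack stale status, point dmadesc at the first descriptor, enable, then start.

#include <linux/io.h>
#include <linux/bits.h>
#include <linux/kernel.h>

struct example_dma_regs {		/* assumed layout, for illustration */
	u32 dmactl;
	u32 dmastat;
	u32 dmacount;
	u32 dmaaddr;
	u32 dmadesc;
};

#define EXAMPLE_DMA_ENABLE	BIT(1)	/* placeholder bit positions */
#define EXAMPLE_DMA_START	BIT(0)

static void example_start_dma(struct example_dma_regs __iomem *dma,
			      dma_addr_t td_dma, u32 dmactl)
{
	writel(readl(&dma->dmastat), &dma->dmastat);	/* write-1-to-clear stale status */
	writel(lower_32_bits(td_dma), &dma->dmadesc);	/* 32-bit engine assumed */
	writel(dmactl | EXAMPLE_DMA_ENABLE, &dma->dmactl);
	writel(EXAMPLE_DMA_START, &dma->dmastat);
}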
dma               538 drivers/usb/gadget/udc/omap_udc.c 		OMAP_DMA_AMODE_POST_INC, req->req.dma + req->req.actual,
dma               565 drivers/usb/gadget/udc/omap_udc.c 		req->req.actual += dma_src_len(ep, req->req.dma
dma               591 drivers/usb/gadget/udc/omap_udc.c 		OMAP_DMA_AMODE_POST_INC, req->req.dma + req->req.actual,
dma               611 drivers/usb/gadget/udc/omap_udc.c 		ep->dma_counter = (u16) (req->req.dma + req->req.actual);
dma               612 drivers/usb/gadget/udc/omap_udc.c 	count = dma_dest_len(ep, req->req.dma + req->req.actual);
dma              2200 drivers/usb/gadget/udc/omap_udc.c 					(ep, req->req.dma + length);
dma               405 drivers/usb/gadget/udc/pch_udc.c 	dma_addr_t			dma;
dma              1449 drivers/usb/gadget/udc/pch_udc.c 		if (req->dma == DMA_ADDR_INVALID) {
dma              1451 drivers/usb/gadget/udc/pch_udc.c 				dma_unmap_single(&dev->pdev->dev, req->req.dma,
dma              1455 drivers/usb/gadget/udc/pch_udc.c 				dma_unmap_single(&dev->pdev->dev, req->req.dma,
dma              1458 drivers/usb/gadget/udc/pch_udc.c 			req->req.dma = DMA_ADDR_INVALID;
dma              1461 drivers/usb/gadget/udc/pch_udc.c 				dma_unmap_single(&dev->pdev->dev, req->dma,
dma              1465 drivers/usb/gadget/udc/pch_udc.c 				dma_unmap_single(&dev->pdev->dev, req->dma,
dma              1471 drivers/usb/gadget/udc/pch_udc.c 			req->dma = DMA_ADDR_INVALID;
dma              1552 drivers/usb/gadget/udc/pch_udc.c 	if (req->dma == DMA_ADDR_INVALID)
dma              1553 drivers/usb/gadget/udc/pch_udc.c 		td->dataptr = req->req.dma;
dma              1555 drivers/usb/gadget/udc/pch_udc.c 		td->dataptr = req->dma;
dma              1762 drivers/usb/gadget/udc/pch_udc.c 	req->req.dma = DMA_ADDR_INVALID;
dma              1763 drivers/usb/gadget/udc/pch_udc.c 	req->dma = DMA_ADDR_INVALID;
dma              1847 drivers/usb/gadget/udc/pch_udc.c 	    ((usbreq->dma == DMA_ADDR_INVALID) || !usbreq->dma)) {
dma              1850 drivers/usb/gadget/udc/pch_udc.c 				usbreq->dma = dma_map_single(&dev->pdev->dev,
dma              1855 drivers/usb/gadget/udc/pch_udc.c 				usbreq->dma = dma_map_single(&dev->pdev->dev,
dma              1867 drivers/usb/gadget/udc/pch_udc.c 				req->dma = dma_map_single(&dev->pdev->dev,
dma              1872 drivers/usb/gadget/udc/pch_udc.c 				req->dma = dma_map_single(&dev->pdev->dev,
dma               412 drivers/usb/gadget/udc/r8a66597-udc.c 				u16 pipenum, int dma)
dma               455 drivers/usb/gadget/udc/r8a66597-udc.c 	int dma = 0;
dma               481 drivers/usb/gadget/udc/r8a66597-udc.c 		dma = 1;
dma               528 drivers/usb/gadget/udc/r8a66597-udc.c 	r8a66597_ep_setting(r8a66597, ep, desc, info.pipe, dma);
dma               633 drivers/usb/gadget/udc/r8a66597-udc.c 	struct r8a66597_dma *dma;
dma               642 drivers/usb/gadget/udc/r8a66597-udc.c 	if (r8a66597->dma.used)
dma               646 drivers/usb/gadget/udc/r8a66597-udc.c 	dma = &r8a66597->dma;
dma               647 drivers/usb/gadget/udc/r8a66597-udc.c 	dma->used = 1;
dma               649 drivers/usb/gadget/udc/r8a66597-udc.c 		dma->dir = 1;
dma               651 drivers/usb/gadget/udc/r8a66597-udc.c 		dma->dir = 0;
dma               657 drivers/usb/gadget/udc/r8a66597-udc.c 	ep->dma = dma;
dma               663 drivers/usb/gadget/udc/r8a66597-udc.c 	return usb_gadget_map_request(&r8a66597->gadget, &req->req, dma->dir);
dma               673 drivers/usb/gadget/udc/r8a66597-udc.c 	usb_gadget_unmap_request(&r8a66597->gadget, &req->req, ep->dma->dir);
dma               678 drivers/usb/gadget/udc/r8a66597-udc.c 	ep->dma->used = 0;
dma               691 drivers/usb/gadget/udc/r8a66597-udc.c 	r8a66597_sudmac_write(r8a66597, req->req.dma, CH0BA);
dma              1433 drivers/usb/gadget/udc/r8a66597-udc.c 		if (ep->dma->dir) {
dma                58 drivers/usb/gadget/udc/r8a66597-udc.h 	struct r8a66597_dma	*dma;
dma                98 drivers/usb/gadget/udc/r8a66597-udc.h 	struct r8a66597_dma	dma;
dma               313 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma;
dma               349 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma dma[USB3_DMA_NUM_SETTING_AREA];
dma               384 drivers/usb/gadget/udc/renesas_usb3.c #define usb3_get_dma(usb3, i)	(&(usb3)->dma[i])
dma               385 drivers/usb/gadget/udc/renesas_usb3.c #define usb3_for_each_dma(usb3, dma, i)				\
dma               386 drivers/usb/gadget/udc/renesas_usb3.c 		for ((i) = 0, dma = usb3_get_dma((usb3), (i));	\
dma               388 drivers/usb/gadget/udc/renesas_usb3.c 		     (i)++, dma = usb3_get_dma((usb3), (i)))
dma              1249 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma;
dma              1266 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_for_each_dma(usb3, dma, i) {
dma              1267 drivers/usb/gadget/udc/renesas_usb3.c 		if (dma->used)
dma              1274 drivers/usb/gadget/udc/renesas_usb3.c 		dma->used = true;
dma              1275 drivers/usb/gadget/udc/renesas_usb3.c 		usb3_ep->dma = dma;
dma              1288 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma;
dma              1290 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_for_each_dma(usb3, dma, i) {
dma              1291 drivers/usb/gadget/udc/renesas_usb3.c 		if (usb3_ep->dma == dma) {
dma              1294 drivers/usb/gadget/udc/renesas_usb3.c 			dma->used = false;
dma              1295 drivers/usb/gadget/udc/renesas_usb3.c 			usb3_ep->dma = NULL;
dma              1304 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_prd *cur_prd = usb3_ep->dma->prd;
dma              1306 drivers/usb/gadget/udc/renesas_usb3.c 	u32 dma = usb3_req->req.dma;
dma              1315 drivers/usb/gadget/udc/renesas_usb3.c 		cur_prd->bap = dma;
dma              1317 drivers/usb/gadget/udc/renesas_usb3.c 		dma += len;
dma              1332 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma = usb3_ep->dma;
dma              1342 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_write(usb3, AXI_INT_PRDEN_CLR_STA(dma->num) |
dma              1343 drivers/usb/gadget/udc/renesas_usb3.c 		   AXI_INT_PRDERR_STA(dma->num), USB3_AXI_INT_STA);
dma              1345 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_write(usb3, dma->prd_dma, USB3_DMA_CH0_PRD_ADR(dma->num));
dma              1346 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_write(usb3, dma_con, USB3_DMA_CH0_CON(dma->num));
dma              1353 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma = usb3_ep->dma;
dma              1356 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_write(usb3, 0, USB3_DMA_CH0_CON(dma->num));
dma              1362 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_prd *cur_prd = usb3_ep->dma->prd;
dma              1419 drivers/usb/gadget/udc/renesas_usb3.c 	if (!usb3_ep->dma)
dma              1437 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma;
dma              1439 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_for_each_dma(usb3, dma, i) {
dma              1440 drivers/usb/gadget/udc/renesas_usb3.c 		if (dma->prd) {
dma              1442 drivers/usb/gadget/udc/renesas_usb3.c 					  dma->prd, dma->prd_dma);
dma              1443 drivers/usb/gadget/udc/renesas_usb3.c 			dma->prd = NULL;
dma              1454 drivers/usb/gadget/udc/renesas_usb3.c 	struct renesas_usb3_dma *dma;
dma              1459 drivers/usb/gadget/udc/renesas_usb3.c 	usb3_for_each_dma(usb3, dma, i) {
dma              1460 drivers/usb/gadget/udc/renesas_usb3.c 		dma->prd = dma_alloc_coherent(dev, USB3_DMA_PRD_SIZE,
dma              1461 drivers/usb/gadget/udc/renesas_usb3.c 					      &dma->prd_dma, GFP_KERNEL);
dma              1462 drivers/usb/gadget/udc/renesas_usb3.c 		if (!dma->prd) {
dma              1466 drivers/usb/gadget/udc/renesas_usb3.c 		dma->num = i + 1;
dma              2033 drivers/usb/gadget/udc/renesas_usb3.c 		    AXI_INT_PRDEN_CLR_STA(usb3_ep->dma->num)))
dma               536 drivers/usb/gadget/udc/snps_udc_core.c 	req->req.dma = DMA_DONT_USE;
dma               539 drivers/usb/gadget/udc/snps_udc_core.c 	if (ep->dma) {
dma               818 drivers/usb/gadget/udc/snps_udc_core.c 			td->bufptr = req->req.dma + i; /* assign buffer */
dma               887 drivers/usb/gadget/udc/snps_udc_core.c 	req->td_data->bufptr = req->req.dma;
dma               977 drivers/usb/gadget/udc/snps_udc_core.c 	if (ep->dma)
dma              1082 drivers/usb/gadget/udc/snps_udc_core.c 	if (ep->dma) {
dma              1128 drivers/usb/gadget/udc/snps_udc_core.c 		if (ep->dma) {
dma              1190 drivers/usb/gadget/udc/snps_udc_core.c 	} else if (ep->dma) {
dma              1275 drivers/usb/gadget/udc/snps_udc_core.c 		if (ep->dma && req->dma_going) {
dma              1577 drivers/usb/gadget/udc/snps_udc_core.c 			ep->dma = &dev->regs->ctl;
dma              2693 drivers/usb/gadget/udc/snps_udc_core.c 		if (ep->dma) {
dma              2710 drivers/usb/gadget/udc/snps_udc_core.c 				if (ep->dma) {
dma              2754 drivers/usb/gadget/udc/snps_udc_core.c 		if (!ep->dma) {
dma              3111 drivers/usb/gadget/udc/snps_udc_core.c 	dev->ep[UDC_EP0IN_IX].dma = &dev->regs->ctl;
dma               383 drivers/usb/gadget/udc/udc-xilinx.c 	src = req->usb_req.dma + req->usb_req.actual;
dma               437 drivers/usb/gadget/udc/udc-xilinx.c 	dst = req->usb_req.dma + req->usb_req.actual;
dma               637 drivers/usb/gadget/udc/udc-xilinx.c 							req->usb_req.dma,
dma                26 drivers/usb/host/ehci-mem.c 				  dma_addr_t dma)
dma                29 drivers/usb/host/ehci-mem.c 	qtd->qtd_dma = dma;
dma                39 drivers/usb/host/ehci-mem.c 	dma_addr_t		dma;
dma                41 drivers/usb/host/ehci-mem.c 	qtd = dma_pool_alloc (ehci->qtd_pool, flags, &dma);
dma                43 drivers/usb/host/ehci-mem.c 		ehci_qtd_init(ehci, qtd, dma);
dma                70 drivers/usb/host/ehci-mem.c 	dma_addr_t		dma;
dma                76 drivers/usb/host/ehci-mem.c 		dma_pool_alloc(ehci->qh_pool, flags, &dma);
dma                80 drivers/usb/host/ehci-mem.c 	qh->qh_dma = dma;
dma               983 drivers/usb/host/ehci-q.c 	__hc32		dma = QH_NEXT(ehci, qh->qh_dma);
dma              1002 drivers/usb/host/ehci-q.c 	head->hw->hw_next = dma;
dma              1059 drivers/usb/host/ehci-q.c 			dma_addr_t		dma;
dma              1072 drivers/usb/host/ehci-q.c 			dma = dummy->qtd_dma;
dma              1074 drivers/usb/host/ehci-q.c 			dummy->qtd_dma = dma;
dma              1084 drivers/usb/host/ehci-q.c 			dma = qtd->qtd_dma;
dma              1087 drivers/usb/host/ehci-q.c 			qtd->hw_next = QTD_NEXT(ehci, dma);
dma              1201 drivers/usb/host/ehci-sched.c 	dma_addr_t	dma = urb->transfer_dma;
dma              1216 drivers/usb/host/ehci-sched.c 		buf = dma + urb->iso_frame_desc[i].offset;
dma              2000 drivers/usb/host/ehci-sched.c 	dma_addr_t	dma = urb->transfer_dma;
dma              2015 drivers/usb/host/ehci-sched.c 		buf = dma + urb->iso_frame_desc[i].offset;
dma               277 drivers/usb/host/ehci.h #define	QTD_NEXT(ehci, dma)	cpu_to_hc32(ehci, (u32)dma)
dma               328 drivers/usb/host/ehci.h #define Q_NEXT_TYPE(ehci, dma)	((dma) & cpu_to_hc32(ehci, 3 << 1))
dma               344 drivers/usb/host/ehci.h #define QH_NEXT(ehci, dma) \
dma               345 drivers/usb/host/ehci.h 		(cpu_to_hc32(ehci, (((u32) dma) & ~0x01f) | Q_TYPE_QH))
dma              1810 drivers/usb/host/fotg210-hcd.c 		struct fotg210_qtd *qtd, dma_addr_t dma)
dma              1813 drivers/usb/host/fotg210-hcd.c 	qtd->qtd_dma = dma;
dma              1824 drivers/usb/host/fotg210-hcd.c 	dma_addr_t dma;
dma              1826 drivers/usb/host/fotg210-hcd.c 	qtd = dma_pool_alloc(fotg210->qtd_pool, flags, &dma);
dma              1828 drivers/usb/host/fotg210-hcd.c 		fotg210_qtd_init(fotg210, qtd, dma);
dma              1857 drivers/usb/host/fotg210-hcd.c 	dma_addr_t dma;
dma              1862 drivers/usb/host/fotg210-hcd.c 	qh->hw = dma_pool_zalloc(fotg210->qh_pool, flags, &dma);
dma              1865 drivers/usb/host/fotg210-hcd.c 	qh->qh_dma = dma;
dma              2914 drivers/usb/host/fotg210-hcd.c 	__hc32 dma = QH_NEXT(fotg210, qh->qh_dma);
dma              2933 drivers/usb/host/fotg210-hcd.c 	head->hw->hw_next = dma;
dma              2981 drivers/usb/host/fotg210-hcd.c 			dma_addr_t dma;
dma              2994 drivers/usb/host/fotg210-hcd.c 			dma = dummy->qtd_dma;
dma              2996 drivers/usb/host/fotg210-hcd.c 			dummy->qtd_dma = dma;
dma              3006 drivers/usb/host/fotg210-hcd.c 			dma = qtd->qtd_dma;
dma              3009 drivers/usb/host/fotg210-hcd.c 			qtd->hw_next = QTD_NEXT(fotg210, dma);
dma              4034 drivers/usb/host/fotg210-hcd.c 	dma_addr_t dma = urb->transfer_dma;
dma              4049 drivers/usb/host/fotg210-hcd.c 		buf = dma + urb->iso_frame_desc[i].offset;
dma               310 drivers/usb/host/fotg210.h #define	QTD_NEXT(fotg210, dma)	cpu_to_hc32(fotg210, (u32)dma)
dma               361 drivers/usb/host/fotg210.h #define Q_NEXT_TYPE(fotg210, dma)	((dma) & cpu_to_hc32(fotg210, 3 << 1))
dma               377 drivers/usb/host/fotg210.h #define QH_NEXT(fotg210, dma) \
dma               378 drivers/usb/host/fotg210.h 	(cpu_to_hc32(fotg210, (((u32)dma)&~0x01f)|Q_TYPE_QH))
dma               274 drivers/usb/host/ohci-hub.c 					find_head (ohci->ed_controltail)->dma,
dma               280 drivers/usb/host/ohci-hub.c 			ohci_writel (ohci, find_head (ohci->ed_bulktail)->dma,
dma                92 drivers/usb/host/ohci-mem.c 	dma_addr_t	dma;
dma                98 drivers/usb/host/ohci-mem.c 				sizeof(*td), &dma, 32);
dma               100 drivers/usb/host/ohci-mem.c 		td = dma_pool_zalloc(hc->td_cache, mem_flags, &dma);
dma               103 drivers/usb/host/ohci-mem.c 		td->hwNextTD = cpu_to_hc32 (hc, dma);
dma               104 drivers/usb/host/ohci-mem.c 		td->td_dma = dma;
dma               136 drivers/usb/host/ohci-mem.c 	dma_addr_t	dma;
dma               142 drivers/usb/host/ohci-mem.c 				sizeof(*ed), &dma, 16);
dma               144 drivers/usb/host/ohci-mem.c 		ed = dma_pool_zalloc(hc->ed_cache, mem_flags, &dma);
dma               147 drivers/usb/host/ohci-mem.c 		ed->dma = dma;
dma               161 drivers/usb/host/ohci-mem.c 		dma_pool_free(hc->ed_cache, ed, ed->dma);
dma               173 drivers/usb/host/ohci-q.c 			*prev_p = cpu_to_hc32(ohci, ed->dma);
dma               206 drivers/usb/host/ohci-q.c 			ohci_writel (ohci, ed->dma,
dma               211 drivers/usb/host/ohci-q.c 								ed->dma);
dma               227 drivers/usb/host/ohci-q.c 			ohci_writel (ohci, ed->dma, &ohci->regs->ed_bulkhead);
dma               231 drivers/usb/host/ohci-q.c 								ed->dma);
dma                44 drivers/usb/host/ohci.h 	dma_addr_t		dma;		/* addr of ED */
dma               223 drivers/usb/host/oxu210hp-hcd.c #define	QTD_NEXT(dma)	cpu_to_le32((u32)dma)
dma               273 drivers/usb/host/oxu210hp-hcd.c #define Q_NEXT_TYPE(dma) ((dma) & cpu_to_le32 (3 << 1))
dma               279 drivers/usb/host/oxu210hp-hcd.c #define	QH_NEXT(dma)	(cpu_to_le32(((u32)dma)&~0x01f)|Q_TYPE_QH)
dma               976 drivers/usb/host/oxu210hp-hcd.c static inline void ehci_qtd_init(struct ehci_qtd *qtd, dma_addr_t dma)
dma               979 drivers/usb/host/oxu210hp-hcd.c 	qtd->qtd_dma = dma;
dma              1918 drivers/usb/host/oxu210hp-hcd.c 	__le32 dma = QH_NEXT(qh->qh_dma);
dma              1948 drivers/usb/host/oxu210hp-hcd.c 	head->hw_next = dma;
dma              1996 drivers/usb/host/oxu210hp-hcd.c 			dma_addr_t dma;
dma              2009 drivers/usb/host/oxu210hp-hcd.c 			dma = dummy->qtd_dma;
dma              2011 drivers/usb/host/oxu210hp-hcd.c 			dummy->qtd_dma = dma;
dma              2021 drivers/usb/host/oxu210hp-hcd.c 			dma = qtd->qtd_dma;
dma              2024 drivers/usb/host/oxu210hp-hcd.c 			qtd->hw_next = QTD_NEXT(dma);
dma                92 drivers/usb/host/xhci-dbgcap.c 	dma_addr_t		deq, dma;
dma               101 drivers/usb/host/xhci-dbgcap.c 	dma			= dbc->string_dma;
dma               102 drivers/usb/host/xhci-dbgcap.c 	info->string0		= cpu_to_le64(dma);
dma               103 drivers/usb/host/xhci-dbgcap.c 	info->manufacturer	= cpu_to_le64(dma + DBC_MAX_STRING_LENGTH);
dma               104 drivers/usb/host/xhci-dbgcap.c 	info->product		= cpu_to_le64(dma + DBC_MAX_STRING_LENGTH * 2);
dma               105 drivers/usb/host/xhci-dbgcap.c 	info->serial		= cpu_to_le64(dma + DBC_MAX_STRING_LENGTH * 3);
dma               124 drivers/usb/host/xhci-dbgcap.c 	xhci_write_64(xhci, dbc->ctx->dma, &dbc->regs->dccp);
dma               152 drivers/usb/host/xhci-dbgcap.c 			 req->dma,
dma               249 drivers/usb/host/xhci-dbgcap.c 	num_trbs = count_trbs(req->dma, req->length);
dma               254 drivers/usb/host/xhci-dbgcap.c 	addr	= req->dma;
dma               304 drivers/usb/host/xhci-dbgcap.c 	req->dma = dma_map_single(dev,
dma               308 drivers/usb/host/xhci-dbgcap.c 	if (dma_mapping_error(dev, req->dma)) {
dma               317 drivers/usb/host/xhci-dbgcap.c 				 req->dma,
dma                90 drivers/usb/host/xhci-dbgcap.h 	dma_addr_t			dma;
dma               165 drivers/usb/host/xhci-debugfs.c 	dma_addr_t		dma;
dma               168 drivers/usb/host/xhci-debugfs.c 	dma = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
dma               169 drivers/usb/host/xhci-debugfs.c 	seq_printf(s, "%pad\n", &dma);
dma               176 drivers/usb/host/xhci-debugfs.c 	dma_addr_t		dma;
dma               179 drivers/usb/host/xhci-debugfs.c 	dma = xhci_trb_virt_to_dma(ring->deq_seg, ring->dequeue);
dma               180 drivers/usb/host/xhci-debugfs.c 	seq_printf(s, "%pad\n", &dma);
dma               198 drivers/usb/host/xhci-debugfs.c 	dma_addr_t		dma;
dma               203 drivers/usb/host/xhci-debugfs.c 		dma = seg->dma + i * sizeof(*trb);
dma               204 drivers/usb/host/xhci-debugfs.c 		seq_printf(s, "%pad: %s\n", &dma,
dma               265 drivers/usb/host/xhci-debugfs.c 	seq_printf(s, "%pad: %s\n", &dev->out_ctx->dma,
dma               277 drivers/usb/host/xhci-debugfs.c 	dma_addr_t		dma;
dma               287 drivers/usb/host/xhci-debugfs.c 		dma = dev->out_ctx->dma + dci * CTX_SIZE(xhci->hcc_params);
dma               288 drivers/usb/host/xhci-debugfs.c 		seq_printf(s, "%pad: %s\n", &dma,
dma                34 drivers/usb/host/xhci-mem.c 	dma_addr_t	dma;
dma                42 drivers/usb/host/xhci-mem.c 	seg->trbs = dma_pool_zalloc(xhci->segment_pool, flags, &dma);
dma                52 drivers/usb/host/xhci-mem.c 			dma_pool_free(xhci->segment_pool, seg->trbs, dma);
dma                62 drivers/usb/host/xhci-mem.c 	seg->dma = dma;
dma                71 drivers/usb/host/xhci-mem.c 		dma_pool_free(xhci->segment_pool, seg->trbs, seg->dma);
dma               109 drivers/usb/host/xhci-mem.c 			cpu_to_le64(next->dma);
dma               192 drivers/usb/host/xhci-mem.c 	key = (unsigned long)(seg->dma >> TRB_SEGMENT_SHIFT);
dma               211 drivers/usb/host/xhci-mem.c 	key = (unsigned long)(seg->dma >> TRB_SEGMENT_SHIFT);
dma               478 drivers/usb/host/xhci-mem.c 	ctx->bytes = dma_pool_zalloc(xhci->device_pool, flags, &ctx->dma);
dma               491 drivers/usb/host/xhci-mem.c 	dma_pool_free(xhci->device_pool, ctx->bytes, ctx->dma);
dma               532 drivers/usb/host/xhci-mem.c 		struct xhci_stream_ctx *stream_ctx, dma_addr_t dma)
dma               539 drivers/usb/host/xhci-mem.c 				stream_ctx, dma);
dma               542 drivers/usb/host/xhci-mem.c 				stream_ctx, dma);
dma               545 drivers/usb/host/xhci-mem.c 				stream_ctx, dma);
dma               559 drivers/usb/host/xhci-mem.c 		unsigned int num_stream_ctxs, dma_addr_t *dma,
dma               567 drivers/usb/host/xhci-mem.c 				dma, mem_flags);
dma               570 drivers/usb/host/xhci-mem.c 				mem_flags, dma);
dma               573 drivers/usb/host/xhci-mem.c 				mem_flags, dma);
dma               680 drivers/usb/host/xhci-mem.c 		addr = cur_ring->first_seg->dma |
dma               994 drivers/usb/host/xhci-mem.c 			(unsigned long long)dev->out_ctx->dma);
dma              1002 drivers/usb/host/xhci-mem.c 			(unsigned long long)dev->in_ctx->dma);
dma              1019 drivers/usb/host/xhci-mem.c 	xhci->dcbaa->dev_context_ptrs[slot_id] = cpu_to_le64(dev->out_ctx->dma);
dma              1205 drivers/usb/host/xhci-mem.c 	ep0_ctx->deq = cpu_to_le64(dev->eps[0].ring->first_seg->dma |
dma              1511 drivers/usb/host/xhci-mem.c 	ep_ctx->deq = cpu_to_le64(ep_ring->first_seg->dma |
dma              1680 drivers/usb/host/xhci-mem.c 		dma_addr_t dma;
dma              1681 drivers/usb/host/xhci-mem.c 		void *buf = dma_alloc_coherent(dev, xhci->page_size, &dma,
dma              1686 drivers/usb/host/xhci-mem.c 		xhci->scratchpad->sp_array[i] = dma;
dma              1818 drivers/usb/host/xhci-mem.c 		entry->seg_addr = cpu_to_le64(seg->dma);
dma              1896 drivers/usb/host/xhci-mem.c 				xhci->dcbaa, xhci->dcbaa->dma);
dma              1985 drivers/usb/host/xhci-mem.c 		{ xhci->event_ring->first_seg->dma - 16, NULL },
dma              1987 drivers/usb/host/xhci-mem.c 		{ xhci->event_ring->first_seg->dma - 1, NULL },
dma              1989 drivers/usb/host/xhci-mem.c 		{ xhci->event_ring->first_seg->dma, xhci->event_ring->first_seg },
dma              1991 drivers/usb/host/xhci-mem.c 		{ xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 1)*16,
dma              1994 drivers/usb/host/xhci-mem.c 		{ xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 1)*16 + 1, NULL },
dma              1996 drivers/usb/host/xhci-mem.c 		{ xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT)*16, NULL },
dma              2011 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->cmd_ring->first_seg->dma,
dma              2018 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->cmd_ring->first_seg->dma,
dma              2025 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->cmd_ring->first_seg->dma,
dma              2032 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->event_ring->first_seg->dma + 4*16,
dma              2039 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->event_ring->first_seg->dma + 2*16,
dma              2046 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->event_ring->first_seg->dma + 2*16,
dma              2053 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 4)*16,
dma              2060 drivers/usb/host/xhci-mem.c 			.input_dma = xhci->cmd_ring->first_seg->dma + 2*16,
dma              2374 drivers/usb/host/xhci-mem.c 	dma_addr_t	dma;
dma              2423 drivers/usb/host/xhci-mem.c 	xhci->dcbaa = dma_alloc_coherent(dev, sizeof(*xhci->dcbaa), &dma,
dma              2427 drivers/usb/host/xhci-mem.c 	xhci->dcbaa->dma = dma;
dma              2430 drivers/usb/host/xhci-mem.c 			(unsigned long long)xhci->dcbaa->dma, xhci->dcbaa);
dma              2431 drivers/usb/host/xhci-mem.c 	xhci_write_64(xhci, dma, &xhci->op_regs->dcbaa_ptr);
dma              2472 drivers/usb/host/xhci-mem.c 			(unsigned long long)xhci->cmd_ring->first_seg->dma);
dma              2477 drivers/usb/host/xhci-mem.c 		(xhci->cmd_ring->first_seg->dma & (u64) ~CMD_RING_RSVD_BITS) |
dma                77 drivers/usb/host/xhci-ring.c 	return seg->dma + (segment_offset * sizeof(*trb));
dma              1180 drivers/usb/host/xhci-ring.c 				xhci->devs[slot_id]->in_ctx->dma, slot_id,
dma              1796 drivers/usb/host/xhci-ring.c 				(unsigned long long)cur_seg->dma,
dma              1810 drivers/usb/host/xhci-ring.c 						(suspect_dma >= cur_seg->dma &&
dma              2636 drivers/usb/host/xhci-ring.c 		ep_trb = &ep_seg->trbs[(ep_trb_dma - ep_seg->dma) /
dma              4137 drivers/usb/host/xhci-ring.c 		(unsigned long long)deq_state->new_deq_seg->dma,
dma                96 drivers/usb/host/xhci-trace.h 		__entry->ctx_dma = ctx->dma;
dma               188 drivers/usb/host/xhci-trace.h 		__entry->in_ctx = (unsigned long long) vdev->in_ctx->dma;
dma               189 drivers/usb/host/xhci-trace.h 		__entry->out_ctx = (unsigned long long) vdev->out_ctx->dma;
dma               221 drivers/usb/host/xhci-trace.h 		__entry->in_ctx = (unsigned long long) vdev->in_ctx->dma;
dma               222 drivers/usb/host/xhci-trace.h 		__entry->out_ctx = (unsigned long long) vdev->out_ctx->dma;
dma               475 drivers/usb/host/xhci-trace.h 		__entry->enq_seg = ring->enq_seg->dma;
dma               476 drivers/usb/host/xhci-trace.h 		__entry->deq_seg = ring->deq_seg->dma;
dma              2797 drivers/usb/host/xhci.c 				command->in_ctx->dma,
dma              2801 drivers/usb/host/xhci.c 				command->in_ctx->dma,
dma              3223 drivers/usb/host/xhci.c 	err = xhci_queue_configure_endpoint(xhci, cfg_cmd, cfg_cmd->in_ctx->dma,
dma              4134 drivers/usb/host/xhci.c 	ret = xhci_queue_address_device(xhci, command, virt_dev->in_ctx->dma,
dma              4204 drivers/usb/host/xhci.c 			(unsigned long long)virt_dev->out_ctx->dma);
dma               602 drivers/usb/host/xhci.h 	dma_addr_t dma;
dma              1061 drivers/usb/host/xhci.h 	dma_addr_t	dma;
dma              1526 drivers/usb/host/xhci.h 	dma_addr_t		dma;
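xhci_trb_virt_to_dma() above works because each ring segment records both its CPU array and the bus address of element zero; the conversion is plain pointer arithmetic, sketched here with simplified stand-in types rather than the real xhci structures.

#include <linux/types.h>

struct example_trb {
	__le32	field[4];
};

struct example_segment {
	struct example_trb	*trbs;	/* CPU view of the segment */
	dma_addr_t		dma;	/* bus address of trbs[0] */
};

static dma_addr_t example_trb_virt_to_dma(const struct example_segment *seg,
					  const struct example_trb *trb)
{
	unsigned long index = trb - seg->trbs;	/* pointer difference, in TRBs */

	return seg->dma + index * sizeof(*trb);
}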
dma               185 drivers/usb/mtu3/mtu3.h 	dma_addr_t dma;
dma               211 drivers/usb/mtu3/mtu3_debugfs.c 		   &ring->dma, ring->start, ring->end,
dma               224 drivers/usb/mtu3/mtu3_debugfs.c 	dma_addr_t dma;
dma               237 drivers/usb/mtu3/mtu3_debugfs.c 		dma = ring->dma + i * sizeof(*gpd);
dma               239 drivers/usb/mtu3/mtu3_debugfs.c 			   i, &dma, gpd, gpd->dw0_info, gpd->next_gpd,
dma               247 drivers/usb/mtu3/mtu3_gadget.c 	mreq->request.dma = DMA_ADDR_INVALID;
dma                95 drivers/usb/mtu3/mtu3_qmu.c static void write_txq_start_addr(void __iomem *mbase, u8 epnum, dma_addr_t dma)
dma               100 drivers/usb/mtu3/mtu3_qmu.c 		    cpu_to_le32(lower_32_bits(dma)));
dma               103 drivers/usb/mtu3/mtu3_qmu.c 	tqhiar |= QMU_START_ADDR_HI(upper_32_bits(dma));
dma               107 drivers/usb/mtu3/mtu3_qmu.c static void write_rxq_start_addr(void __iomem *mbase, u8 epnum, dma_addr_t dma)
dma               112 drivers/usb/mtu3/mtu3_qmu.c 		    cpu_to_le32(lower_32_bits(dma)));
dma               115 drivers/usb/mtu3/mtu3_qmu.c 	rqhiar |= QMU_START_ADDR_HI(upper_32_bits(dma));
dma               122 drivers/usb/mtu3/mtu3_qmu.c 	dma_addr_t dma_base = ring->dma;
dma               135 drivers/usb/mtu3/mtu3_qmu.c 	dma_addr_t dma_base = ring->dma;
dma               171 drivers/usb/mtu3/mtu3_qmu.c 	gpd = dma_pool_zalloc(mep->mtu->qmu_gpd_pool, GFP_ATOMIC, &ring->dma);
dma               185 drivers/usb/mtu3/mtu3_qmu.c 			ring->start, ring->dma);
dma               254 drivers/usb/mtu3/mtu3_qmu.c 	gpd->buffer = cpu_to_le32(lower_32_bits(req->dma));
dma               255 drivers/usb/mtu3/mtu3_qmu.c 	ext_addr = GPD_EXT_BUF(mtu, upper_32_bits(req->dma));
dma               295 drivers/usb/mtu3/mtu3_qmu.c 	gpd->buffer = cpu_to_le32(lower_32_bits(req->dma));
dma               296 drivers/usb/mtu3/mtu3_qmu.c 	ext_addr = GPD_EXT_BUF(mtu, upper_32_bits(req->dma));
dma               335 drivers/usb/mtu3/mtu3_qmu.c 		write_txq_start_addr(mbase, epnum, ring->dma);
dma               349 drivers/usb/mtu3/mtu3_qmu.c 		write_rxq_start_addr(mbase, epnum, ring->dma);
dma               245 drivers/usb/mtu3/mtu3_trace.h 		__entry->gpd_ring, &__entry->gpd_ring->dma,
dma               126 drivers/usb/musb/cppi_dma.c 		dma_addr_t		dma;
dma               128 drivers/usb/musb/cppi_dma.c 		bd = dma_pool_alloc(cppi->pool, GFP_KERNEL, &dma);
dma               129 drivers/usb/musb/cppi_dma.c 		bd->dma = dma;
dma               149 drivers/usb/musb/cppi_dma.c 			dma_pool_free(cppi->pool, bd, bd->dma);
dma               433 drivers/usb/musb/cppi_dma.c 			tag, bd->dma,
dma               623 drivers/usb/musb/cppi_dma.c 			bd->hw_next = bd->next->dma;
dma               664 drivers/usb/musb/cppi_dma.c 	musb_writel(&tx_ram->tx_head, 0, (u32)tx->freelist->dma);
dma               843 drivers/usb/musb/cppi_dma.c 			tail->hw_next = bd->dma;
dma               892 drivers/usb/musb/cppi_dma.c 		tail->hw_next = bd->dma;
dma               903 drivers/usb/musb/cppi_dma.c 	musb_writel(&rx_ram->rx_head, 0, bd->dma);
dma              1033 drivers/usb/musb/cppi_dma.c 			(unsigned long long)bd->dma, bd->hw_next, bd->hw_bufp,
dma              1066 drivers/usb/musb/cppi_dma.c 		if (bd->dma == safe2ack) {
dma              1070 drivers/usb/musb/cppi_dma.c 			if (bd->dma == safe2ack)
dma              1089 drivers/usb/musb/cppi_dma.c 		if (safe2ack == 0 || safe2ack == rx->last_processed->dma)
dma              1108 drivers/usb/musb/cppi_dma.c 						rx->last_processed->dma
dma              1227 drivers/usb/musb/cppi_dma.c 				musb_writel(&tx_ram->tx_complete, 0, bd->dma);
dma                65 drivers/usb/musb/cppi_dma.h 	dma_addr_t	dma;		/* address of this descriptor */
dma                36 drivers/usb/musb/musb_gadget.c 	struct dma_controller *dma = musb->dma_controller;
dma                40 drivers/usb/musb/musb_gadget.c 	if (!is_dma_capable() || !musb_ep->dma)
dma                47 drivers/usb/musb/musb_gadget.c 	if (dma->is_compatible)
dma                48 drivers/usb/musb/musb_gadget.c 		compatible = dma->is_compatible(musb_ep->dma,
dma                54 drivers/usb/musb/musb_gadget.c 	if (request->request.dma == DMA_ADDR_INVALID) {
dma                69 drivers/usb/musb/musb_gadget.c 		request->request.dma = dma_addr;
dma                73 drivers/usb/musb/musb_gadget.c 			request->request.dma,
dma                88 drivers/usb/musb/musb_gadget.c 	if (!is_buffer_mapped(request) || !musb_ep->dma)
dma                91 drivers/usb/musb/musb_gadget.c 	if (request->request.dma == DMA_ADDR_INVALID) {
dma                98 drivers/usb/musb/musb_gadget.c 			request->request.dma,
dma               103 drivers/usb/musb/musb_gadget.c 		request->request.dma = DMA_ADDR_INVALID;
dma               106 drivers/usb/musb/musb_gadget.c 			request->request.dma,
dma               143 drivers/usb/musb/musb_gadget.c 	if (!dma_mapping_error(&musb->g.dev, request->dma))
dma               166 drivers/usb/musb/musb_gadget.c 	if (is_dma_capable() && ep->dma) {
dma               187 drivers/usb/musb/musb_gadget.c 		value = c->channel_abort(ep->dma);
dma               189 drivers/usb/musb/musb_gadget.c 		c->channel_release(ep->dma);
dma               190 drivers/usb/musb/musb_gadget.c 		ep->dma = NULL;
dma               242 drivers/usb/musb/musb_gadget.c 	if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) {
dma               277 drivers/usb/musb/musb_gadget.c 					musb_ep->dma->max_len);
dma               279 drivers/usb/musb/musb_gadget.c 		use_dma = (request->dma != DMA_ADDR_INVALID && request_size);
dma               285 drivers/usb/musb/musb_gadget.c 				musb_ep->dma->desired_mode = 0;
dma               287 drivers/usb/musb/musb_gadget.c 				musb_ep->dma->desired_mode = 1;
dma               290 drivers/usb/musb/musb_gadget.c 					musb_ep->dma, musb_ep->packet_sz,
dma               291 drivers/usb/musb/musb_gadget.c 					musb_ep->dma->desired_mode,
dma               292 drivers/usb/musb/musb_gadget.c 					request->dma + request->actual, request_size);
dma               294 drivers/usb/musb/musb_gadget.c 				if (musb_ep->dma->desired_mode == 0) {
dma               358 drivers/usb/musb/musb_gadget.c 					musb_ep->dma, musb_ep->packet_sz,
dma               360 drivers/usb/musb/musb_gadget.c 					request->dma + request->actual,
dma               363 drivers/usb/musb/musb_gadget.c 				c->channel_release(musb_ep->dma);
dma               364 drivers/usb/musb/musb_gadget.c 				musb_ep->dma = NULL;
dma               371 drivers/usb/musb/musb_gadget.c 					musb_ep->dma, musb_ep->packet_sz,
dma               373 drivers/usb/musb/musb_gadget.c 					request->dma + request->actual,
dma               414 drivers/usb/musb/musb_gadget.c 	struct dma_channel	*dma;
dma               423 drivers/usb/musb/musb_gadget.c 	dma = is_dma_capable() ? musb_ep->dma : NULL;
dma               445 drivers/usb/musb/musb_gadget.c 	if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma               458 drivers/usb/musb/musb_gadget.c 		if (dma && (csr & MUSB_TXCSR_DMAENAB)) {
dma               465 drivers/usb/musb/musb_gadget.c 			request->actual += musb_ep->dma->actual_len;
dma               467 drivers/usb/musb/musb_gadget.c 				epnum, csr, musb_ep->dma->actual_len, request);
dma               545 drivers/usb/musb/musb_gadget.c 	if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) {
dma               558 drivers/usb/musb/musb_gadget.c 		struct dma_channel	*channel = musb_ep->dma;
dma               568 drivers/usb/musb/musb_gadget.c 				request->dma + request->actual,
dma               608 drivers/usb/musb/musb_gadget.c 				channel = musb_ep->dma;
dma               651 drivers/usb/musb/musb_gadget.c 					musb_ep->dma->desired_mode = 1;
dma               661 drivers/usb/musb/musb_gadget.c 					musb_ep->dma->desired_mode = 0;
dma               668 drivers/usb/musb/musb_gadget.c 						request->dma
dma               684 drivers/usb/musb/musb_gadget.c 				channel = musb_ep->dma;
dma               707 drivers/usb/musb/musb_gadget.c 					musb_ep->dma->desired_mode = 0;
dma               709 drivers/usb/musb/musb_gadget.c 					musb_ep->dma->desired_mode = 1;
dma               718 drivers/usb/musb/musb_gadget.c 							request->dma
dma               735 drivers/usb/musb/musb_gadget.c 				struct dma_channel *channel = musb_ep->dma;
dma               736 drivers/usb/musb/musb_gadget.c 				u32 dma_addr = request->dma + request->actual;
dma               795 drivers/usb/musb/musb_gadget.c 	struct dma_channel	*dma;
dma               813 drivers/usb/musb/musb_gadget.c 	dma = is_dma_capable() ? musb_ep->dma : NULL;
dma               816 drivers/usb/musb/musb_gadget.c 			csr, dma ? " (dma)" : "", request);
dma               839 drivers/usb/musb/musb_gadget.c 	if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma               846 drivers/usb/musb/musb_gadget.c 	if (dma && (csr & MUSB_RXCSR_DMAENAB)) {
dma               853 drivers/usb/musb/musb_gadget.c 		request->actual += musb_ep->dma->actual_len;
dma               858 drivers/usb/musb/musb_gadget.c 		if ((dma->desired_mode == 0 && !hw_ep->rx_double_buffered)
dma               859 drivers/usb/musb/musb_gadget.c 				|| (dma->actual_len
dma               868 drivers/usb/musb/musb_gadget.c 				&& (musb_ep->dma->actual_len
dma              1054 drivers/usb/musb/musb_gadget.c 		musb_ep->dma = c->channel_alloc(c, hw_ep,
dma              1057 drivers/usb/musb/musb_gadget.c 		musb_ep->dma = NULL;
dma              1068 drivers/usb/musb/musb_gadget.c 			musb_ep->dma ? "dma, " : "",
dma              1137 drivers/usb/musb/musb_gadget.c 	request->request.dma = DMA_ADDR_INVALID;
dma              1163 drivers/usb/musb/musb_gadget.c 	dma_addr_t		dma;
dma              1296 drivers/usb/musb/musb_gadget.c 	else if (is_dma_capable() && musb_ep->dma) {
dma              1301 drivers/usb/musb/musb_gadget.c 			status = c->channel_abort(musb_ep->dma);
dma                86 drivers/usb/musb/musb_gadget.h 	struct dma_channel		*dma;
dma               347 drivers/usb/musb/musb_host.c 		struct dma_controller	*dma = musb->dma_controller;
dma               352 drivers/usb/musb/musb_host.c 				dma->channel_release(ep->rx_channel);
dma               358 drivers/usb/musb/musb_host.c 				dma->channel_release(ep->tx_channel);
dma               584 drivers/usb/musb/musb_host.c static void musb_tx_dma_set_mode_mentor(struct dma_controller *dma,
dma               623 drivers/usb/musb/musb_host.c static void musb_tx_dma_set_mode_cppi_tusb(struct dma_controller *dma,
dma               642 drivers/usb/musb/musb_host.c static bool musb_tx_dma_program(struct dma_controller *dma,
dma               651 drivers/usb/musb/musb_host.c 		musb_tx_dma_set_mode_mentor(dma, hw_ep, qh, urb, offset,
dma               654 drivers/usb/musb/musb_host.c 		musb_tx_dma_set_mode_cppi_tusb(dma, hw_ep, qh, urb, offset,
dma               667 drivers/usb/musb/musb_host.c 	if (!dma->channel_program(channel, pkt_size, mode,
dma               672 drivers/usb/musb/musb_host.c 		dma->channel_release(channel);
dma               930 drivers/usb/musb/musb_host.c 	struct dma_channel	*dma;
dma               939 drivers/usb/musb/musb_host.c 		dma = is_dma_capable() ? ep->rx_channel : NULL;
dma               955 drivers/usb/musb/musb_host.c 		dma = is_dma_capable() ? ep->tx_channel : NULL;
dma               967 drivers/usb/musb/musb_host.c 		if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma               968 drivers/usb/musb/musb_host.c 			dma->status = MUSB_DMA_STATUS_CORE_ABORT;
dma               969 drivers/usb/musb/musb_host.c 			musb->dma_controller->channel_abort(dma);
dma               970 drivers/usb/musb/musb_host.c 			urb->actual_length += dma->actual_len;
dma               971 drivers/usb/musb/musb_host.c 			dma->actual_len = 0L;
dma              1234 drivers/usb/musb/musb_host.c 	struct dma_channel	*dma;
dma              1247 drivers/usb/musb/musb_host.c 	dma = is_dma_capable() ? hw_ep->tx_channel : NULL;
dma              1250 drivers/usb/musb/musb_host.c 			dma ? ", dma" : "");
dma              1291 drivers/usb/musb/musb_host.c 		if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma              1292 drivers/usb/musb/musb_host.c 			dma->status = MUSB_DMA_STATUS_CORE_ABORT;
dma              1293 drivers/usb/musb/musb_host.c 			musb->dma_controller->channel_abort(dma);
dma              1317 drivers/usb/musb/musb_host.c 	if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma              1322 drivers/usb/musb/musb_host.c 	if (is_dma_capable() && dma && !status) {
dma              1384 drivers/usb/musb/musb_host.c 	if (!status || dma || usb_pipeisoc(pipe)) {
dma              1385 drivers/usb/musb/musb_host.c 		if (dma)
dma              1386 drivers/usb/musb/musb_host.c 			length = dma->actual_len;
dma              1404 drivers/usb/musb/musb_host.c 		} else if (dma && urb->transfer_buffer_length == qh->offset) {
dma              1437 drivers/usb/musb/musb_host.c 	} else if ((usb_pipeisoc(pipe) || transfer_pending) && dma) {
dma              1490 drivers/usb/musb/musb_host.c static int musb_rx_dma_iso_cppi41(struct dma_controller *dma,
dma              1511 drivers/usb/musb/musb_host.c 	return dma->channel_program(channel, qh->maxpacket, 0,
dma              1515 drivers/usb/musb/musb_host.c static inline int musb_rx_dma_iso_cppi41(struct dma_controller *dma,
dma              1561 drivers/usb/musb/musb_host.c static int musb_rx_dma_inventra_cppi41(struct dma_controller *dma,
dma              1592 drivers/usb/musb/musb_host.c 				done = musb_rx_dma_iso_cppi41(dma, hw_ep, qh,
dma              1631 drivers/usb/musb/musb_host.c static int musb_rx_dma_in_inventra_cppi41(struct dma_controller *dma,
dma              1713 drivers/usb/musb/musb_host.c 	done = dma->channel_program(channel, qh->maxpacket,
dma              1718 drivers/usb/musb/musb_host.c 		dma->channel_release(channel);
dma              1731 drivers/usb/musb/musb_host.c static inline int musb_rx_dma_inventra_cppi41(struct dma_controller *dma,
dma              1740 drivers/usb/musb/musb_host.c static inline int musb_rx_dma_in_inventra_cppi41(struct dma_controller *dma,
dma              1768 drivers/usb/musb/musb_host.c 	struct dma_channel	*dma;
dma              1774 drivers/usb/musb/musb_host.c 	dma = is_dma_capable() ? hw_ep->rx_channel : NULL;
dma              1850 drivers/usb/musb/musb_host.c 		if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma              1851 drivers/usb/musb/musb_host.c 			dma->status = MUSB_DMA_STATUS_CORE_ABORT;
dma              1852 drivers/usb/musb/musb_host.c 			musb->dma_controller->channel_abort(dma);
dma              1853 drivers/usb/musb/musb_host.c 			xfer_len = dma->actual_len;
dma              1861 drivers/usb/musb/musb_host.c 	if (unlikely(dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY)) {
dma              1880 drivers/usb/musb/musb_host.c 		if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) {
dma              1881 drivers/usb/musb/musb_host.c 			dma->status = MUSB_DMA_STATUS_CORE_ABORT;
dma              1882 drivers/usb/musb/musb_host.c 			musb->dma_controller->channel_abort(dma);
dma              1883 drivers/usb/musb/musb_host.c 			xfer_len = dma->actual_len;
dma              1888 drivers/usb/musb/musb_host.c 				xfer_len, dma ? ", dma" : "");
dma              1896 drivers/usb/musb/musb_host.c 	if (dma && (rx_csr & MUSB_RXCSR_DMAENAB)) {
dma              1897 drivers/usb/musb/musb_host.c 		xfer_len = dma->actual_len;
dma              1935 drivers/usb/musb/musb_host.c 		    musb_dma_cppi41(musb)) && dma) {
dma              1951 drivers/usb/musb/musb_host.c 		if (!dma) {
dma              2338 drivers/usb/musb/musb_host.c 	struct dma_channel	*dma = NULL;
dma              2343 drivers/usb/musb/musb_host.c 		dma = is_in ? ep->rx_channel : ep->tx_channel;
dma              2344 drivers/usb/musb/musb_host.c 		if (dma) {
dma              2345 drivers/usb/musb/musb_host.c 			status = ep->musb->dma_controller->channel_abort(dma);
dma              2349 drivers/usb/musb/musb_host.c 			urb->actual_length += dma->actual_len;
dma              2359 drivers/usb/musb/musb_host.c 		if (is_dma_capable() && dma)
dma               824 drivers/usb/renesas_usbhs/fifo.c 	desc = dmaengine_prep_slave_single(chan, pkt->dma + pkt->actual,
dma                52 drivers/usb/renesas_usbhs/fifo.h 	dma_addr_t dma;
dma               206 drivers/usb/renesas_usbhs/mod_gadget.c 		pkt->dma = req->dma;
dma               928 drivers/usb/renesas_usbhs/mod_host.c 		pkt->dma = urb->transfer_dma;
dma               929 drivers/usb/renesas_usbhs/mod_host.c 		if (!pkt->dma)
dma               140 drivers/vfio/vfio_iommu_type1.c 		struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node);
dma               142 drivers/vfio/vfio_iommu_type1.c 		if (start + size <= dma->iova)
dma               144 drivers/vfio/vfio_iommu_type1.c 		else if (start >= dma->iova + dma->size)
dma               147 drivers/vfio/vfio_iommu_type1.c 			return dma;
dma               156 drivers/vfio/vfio_iommu_type1.c 	struct vfio_dma *dma;
dma               160 drivers/vfio/vfio_iommu_type1.c 		dma = rb_entry(parent, struct vfio_dma, node);
dma               162 drivers/vfio/vfio_iommu_type1.c 		if (new->iova + new->size <= dma->iova)
dma               180 drivers/vfio/vfio_iommu_type1.c static struct vfio_pfn *vfio_find_vpfn(struct vfio_dma *dma, dma_addr_t iova)
dma               183 drivers/vfio/vfio_iommu_type1.c 	struct rb_node *node = dma->pfn_list.rb_node;
dma               198 drivers/vfio/vfio_iommu_type1.c static void vfio_link_pfn(struct vfio_dma *dma,
dma               204 drivers/vfio/vfio_iommu_type1.c 	link = &dma->pfn_list.rb_node;
dma               216 drivers/vfio/vfio_iommu_type1.c 	rb_insert_color(&new->node, &dma->pfn_list);
dma               219 drivers/vfio/vfio_iommu_type1.c static void vfio_unlink_pfn(struct vfio_dma *dma, struct vfio_pfn *old)
dma               221 drivers/vfio/vfio_iommu_type1.c 	rb_erase(&old->node, &dma->pfn_list);
dma               224 drivers/vfio/vfio_iommu_type1.c static int vfio_add_to_pfn_list(struct vfio_dma *dma, dma_addr_t iova,
dma               236 drivers/vfio/vfio_iommu_type1.c 	vfio_link_pfn(dma, vpfn);
dma               240 drivers/vfio/vfio_iommu_type1.c static void vfio_remove_from_pfn_list(struct vfio_dma *dma,
dma               243 drivers/vfio/vfio_iommu_type1.c 	vfio_unlink_pfn(dma, vpfn);
dma               247 drivers/vfio/vfio_iommu_type1.c static struct vfio_pfn *vfio_iova_get_vfio_pfn(struct vfio_dma *dma,
dma               250 drivers/vfio/vfio_iommu_type1.c 	struct vfio_pfn *vpfn = vfio_find_vpfn(dma, iova);
dma               257 drivers/vfio/vfio_iommu_type1.c static int vfio_iova_put_vfio_pfn(struct vfio_dma *dma, struct vfio_pfn *vpfn)
dma               262 drivers/vfio/vfio_iommu_type1.c 		ret = put_pfn(vpfn->pfn, dma->prot);
dma               263 drivers/vfio/vfio_iommu_type1.c 		vfio_remove_from_pfn_list(dma, vpfn);
dma               268 drivers/vfio/vfio_iommu_type1.c static int vfio_lock_acct(struct vfio_dma *dma, long npage, bool async)
dma               276 drivers/vfio/vfio_iommu_type1.c 	mm = async ? get_task_mm(dma->task) : dma->task->mm;
dma               282 drivers/vfio/vfio_iommu_type1.c 		ret = __account_locked_vm(mm, abs(npage), npage > 0, dma->task,
dma               283 drivers/vfio/vfio_iommu_type1.c 					  dma->lock_cap);
dma               397 drivers/vfio/vfio_iommu_type1.c static long vfio_pin_pages_remote(struct vfio_dma *dma, unsigned long vaddr,
dma               404 drivers/vfio/vfio_iommu_type1.c 	dma_addr_t iova = vaddr - dma->vaddr + dma->iova;
dma               410 drivers/vfio/vfio_iommu_type1.c 	ret = vaddr_get_pfn(current->mm, vaddr, dma->prot, pfn_base);
dma               421 drivers/vfio/vfio_iommu_type1.c 	if (!rsvd && !vfio_find_vpfn(dma, iova)) {
dma               422 drivers/vfio/vfio_iommu_type1.c 		if (!dma->lock_cap && current->mm->locked_vm + 1 > limit) {
dma               423 drivers/vfio/vfio_iommu_type1.c 			put_pfn(*pfn_base, dma->prot);
dma               437 drivers/vfio/vfio_iommu_type1.c 		ret = vaddr_get_pfn(current->mm, vaddr, dma->prot, &pfn);
dma               443 drivers/vfio/vfio_iommu_type1.c 			put_pfn(pfn, dma->prot);
dma               447 drivers/vfio/vfio_iommu_type1.c 		if (!rsvd && !vfio_find_vpfn(dma, iova)) {
dma               448 drivers/vfio/vfio_iommu_type1.c 			if (!dma->lock_cap &&
dma               450 drivers/vfio/vfio_iommu_type1.c 				put_pfn(pfn, dma->prot);
dma               461 drivers/vfio/vfio_iommu_type1.c 	ret = vfio_lock_acct(dma, lock_acct, false);
dma               467 drivers/vfio/vfio_iommu_type1.c 				put_pfn(pfn, dma->prot);
dma               476 drivers/vfio/vfio_iommu_type1.c static long vfio_unpin_pages_remote(struct vfio_dma *dma, dma_addr_t iova,
dma               484 drivers/vfio/vfio_iommu_type1.c 		if (put_pfn(pfn++, dma->prot)) {
dma               486 drivers/vfio/vfio_iommu_type1.c 			if (vfio_find_vpfn(dma, iova))
dma               492 drivers/vfio/vfio_iommu_type1.c 		vfio_lock_acct(dma, locked - unlocked, true);
dma               497 drivers/vfio/vfio_iommu_type1.c static int vfio_pin_page_external(struct vfio_dma *dma, unsigned long vaddr,
dma               503 drivers/vfio/vfio_iommu_type1.c 	mm = get_task_mm(dma->task);
dma               507 drivers/vfio/vfio_iommu_type1.c 	ret = vaddr_get_pfn(mm, vaddr, dma->prot, pfn_base);
dma               509 drivers/vfio/vfio_iommu_type1.c 		ret = vfio_lock_acct(dma, 1, true);
dma               511 drivers/vfio/vfio_iommu_type1.c 			put_pfn(*pfn_base, dma->prot);
dma               515 drivers/vfio/vfio_iommu_type1.c 					dma->task->comm, task_pid_nr(dma->task),
dma               516 drivers/vfio/vfio_iommu_type1.c 					task_rlimit(dma->task, RLIMIT_MEMLOCK));
dma               524 drivers/vfio/vfio_iommu_type1.c static int vfio_unpin_page_external(struct vfio_dma *dma, dma_addr_t iova,
dma               528 drivers/vfio/vfio_iommu_type1.c 	struct vfio_pfn *vpfn = vfio_find_vpfn(dma, iova);
dma               533 drivers/vfio/vfio_iommu_type1.c 	unlocked = vfio_iova_put_vfio_pfn(dma, vpfn);
dma               536 drivers/vfio/vfio_iommu_type1.c 		vfio_lock_acct(dma, -unlocked, true);
dma               549 drivers/vfio/vfio_iommu_type1.c 	struct vfio_dma *dma;
dma               579 drivers/vfio/vfio_iommu_type1.c 		dma = vfio_find_dma(iommu, iova, PAGE_SIZE);
dma               580 drivers/vfio/vfio_iommu_type1.c 		if (!dma) {
dma               585 drivers/vfio/vfio_iommu_type1.c 		if ((dma->prot & prot) != prot) {
dma               590 drivers/vfio/vfio_iommu_type1.c 		vpfn = vfio_iova_get_vfio_pfn(dma, iova);
dma               596 drivers/vfio/vfio_iommu_type1.c 		remote_vaddr = dma->vaddr + (iova - dma->iova);
dma               597 drivers/vfio/vfio_iommu_type1.c 		ret = vfio_pin_page_external(dma, remote_vaddr, &phys_pfn[i],
dma               602 drivers/vfio/vfio_iommu_type1.c 		ret = vfio_add_to_pfn_list(dma, iova, phys_pfn[i]);
dma               604 drivers/vfio/vfio_iommu_type1.c 			vfio_unpin_page_external(dma, iova, do_accounting);
dma               618 drivers/vfio/vfio_iommu_type1.c 		dma = vfio_find_dma(iommu, iova, PAGE_SIZE);
dma               619 drivers/vfio/vfio_iommu_type1.c 		vfio_unpin_page_external(dma, iova, do_accounting);
dma               646 drivers/vfio/vfio_iommu_type1.c 		struct vfio_dma *dma;
dma               650 drivers/vfio/vfio_iommu_type1.c 		dma = vfio_find_dma(iommu, iova, PAGE_SIZE);
dma               651 drivers/vfio/vfio_iommu_type1.c 		if (!dma)
dma               653 drivers/vfio/vfio_iommu_type1.c 		vfio_unpin_page_external(dma, iova, do_accounting);
dma               661 drivers/vfio/vfio_iommu_type1.c static long vfio_sync_unpin(struct vfio_dma *dma, struct vfio_domain *domain,
dma               671 drivers/vfio/vfio_iommu_type1.c 		unlocked += vfio_unpin_pages_remote(dma,
dma               695 drivers/vfio/vfio_iommu_type1.c 			       struct vfio_dma *dma, dma_addr_t *iova,
dma               726 drivers/vfio/vfio_iommu_type1.c 		*unlocked += vfio_sync_unpin(dma, domain, unmapped_list,
dma               735 drivers/vfio/vfio_iommu_type1.c 			       struct vfio_dma *dma, dma_addr_t *iova,
dma               742 drivers/vfio/vfio_iommu_type1.c 		*unlocked += vfio_unpin_pages_remote(dma, *iova,
dma               752 drivers/vfio/vfio_iommu_type1.c static long vfio_unmap_unpin(struct vfio_iommu *iommu, struct vfio_dma *dma,
dma               755 drivers/vfio/vfio_iommu_type1.c 	dma_addr_t iova = dma->iova, end = dma->iova + dma->size;
dma               762 drivers/vfio/vfio_iommu_type1.c 	if (!dma->size)
dma               779 drivers/vfio/vfio_iommu_type1.c 		iommu_unmap(d->domain, dma->iova, dma->size);
dma               810 drivers/vfio/vfio_iommu_type1.c 		unmapped = unmap_unpin_fast(domain, dma, &iova, len, phys,
dma               815 drivers/vfio/vfio_iommu_type1.c 			unmapped = unmap_unpin_slow(domain, dma, &iova, len,
dma               822 drivers/vfio/vfio_iommu_type1.c 	dma->iommu_mapped = false;
dma               825 drivers/vfio/vfio_iommu_type1.c 		unlocked += vfio_sync_unpin(dma, domain, &unmapped_region_list,
dma               830 drivers/vfio/vfio_iommu_type1.c 		vfio_lock_acct(dma, -unlocked, true);
dma               836 drivers/vfio/vfio_iommu_type1.c static void vfio_remove_dma(struct vfio_iommu *iommu, struct vfio_dma *dma)
dma               838 drivers/vfio/vfio_iommu_type1.c 	vfio_unmap_unpin(iommu, dma, true);
dma               839 drivers/vfio/vfio_iommu_type1.c 	vfio_unlink_dma(iommu, dma);
dma               840 drivers/vfio/vfio_iommu_type1.c 	put_task_struct(dma->task);
dma               841 drivers/vfio/vfio_iommu_type1.c 	kfree(dma);
dma               875 drivers/vfio/vfio_iommu_type1.c 	struct vfio_dma *dma, *dma_last = NULL;
dma               925 drivers/vfio/vfio_iommu_type1.c 		dma = vfio_find_dma(iommu, unmap->iova, 1);
dma               926 drivers/vfio/vfio_iommu_type1.c 		if (dma && dma->iova != unmap->iova) {
dma               930 drivers/vfio/vfio_iommu_type1.c 		dma = vfio_find_dma(iommu, unmap->iova + unmap->size - 1, 0);
dma               931 drivers/vfio/vfio_iommu_type1.c 		if (dma && dma->iova + dma->size != unmap->iova + unmap->size) {
dma               937 drivers/vfio/vfio_iommu_type1.c 	while ((dma = vfio_find_dma(iommu, unmap->iova, unmap->size))) {
dma               938 drivers/vfio/vfio_iommu_type1.c 		if (!iommu->v2 && unmap->iova > dma->iova)
dma               944 drivers/vfio/vfio_iommu_type1.c 		if (dma->task->mm != current->mm)
dma               947 drivers/vfio/vfio_iommu_type1.c 		if (!RB_EMPTY_ROOT(&dma->pfn_list)) {
dma               950 drivers/vfio/vfio_iommu_type1.c 			if (dma_last == dma) {
dma               953 drivers/vfio/vfio_iommu_type1.c 				dma_last = dma;
dma               957 drivers/vfio/vfio_iommu_type1.c 			nb_unmap.iova = dma->iova;
dma               958 drivers/vfio/vfio_iommu_type1.c 			nb_unmap.size = dma->size;
dma               972 drivers/vfio/vfio_iommu_type1.c 		unmapped += dma->size;
dma               973 drivers/vfio/vfio_iommu_type1.c 		vfio_remove_dma(iommu, dma);
dma              1009 drivers/vfio/vfio_iommu_type1.c static int vfio_pin_map_dma(struct vfio_iommu *iommu, struct vfio_dma *dma,
dma              1012 drivers/vfio/vfio_iommu_type1.c 	dma_addr_t iova = dma->iova;
dma              1013 drivers/vfio/vfio_iommu_type1.c 	unsigned long vaddr = dma->vaddr;
dma              1021 drivers/vfio/vfio_iommu_type1.c 		npage = vfio_pin_pages_remote(dma, vaddr + dma->size,
dma              1030 drivers/vfio/vfio_iommu_type1.c 		ret = vfio_iommu_map(iommu, iova + dma->size, pfn, npage,
dma              1031 drivers/vfio/vfio_iommu_type1.c 				     dma->prot);
dma              1033 drivers/vfio/vfio_iommu_type1.c 			vfio_unpin_pages_remote(dma, iova + dma->size, pfn,
dma              1039 drivers/vfio/vfio_iommu_type1.c 		dma->size += npage << PAGE_SHIFT;
dma              1042 drivers/vfio/vfio_iommu_type1.c 	dma->iommu_mapped = true;
dma              1045 drivers/vfio/vfio_iommu_type1.c 		vfio_remove_dma(iommu, dma);
dma              1079 drivers/vfio/vfio_iommu_type1.c 	struct vfio_dma *dma;
dma              1119 drivers/vfio/vfio_iommu_type1.c 	dma = kzalloc(sizeof(*dma), GFP_KERNEL);
dma              1120 drivers/vfio/vfio_iommu_type1.c 	if (!dma) {
dma              1126 drivers/vfio/vfio_iommu_type1.c 	dma->iova = iova;
dma              1127 drivers/vfio/vfio_iommu_type1.c 	dma->vaddr = vaddr;
dma              1128 drivers/vfio/vfio_iommu_type1.c 	dma->prot = prot;
dma              1156 drivers/vfio/vfio_iommu_type1.c 	dma->task = current->group_leader;
dma              1157 drivers/vfio/vfio_iommu_type1.c 	dma->lock_cap = capable(CAP_IPC_LOCK);
dma              1159 drivers/vfio/vfio_iommu_type1.c 	dma->pfn_list = RB_ROOT;
dma              1162 drivers/vfio/vfio_iommu_type1.c 	vfio_link_dma(iommu, dma);
dma              1166 drivers/vfio/vfio_iommu_type1.c 		dma->size = size;
dma              1168 drivers/vfio/vfio_iommu_type1.c 		ret = vfio_pin_map_dma(iommu, dma, size);
dma              1200 drivers/vfio/vfio_iommu_type1.c 		struct vfio_dma *dma;
dma              1203 drivers/vfio/vfio_iommu_type1.c 		dma = rb_entry(n, struct vfio_dma, node);
dma              1204 drivers/vfio/vfio_iommu_type1.c 		iova = dma->iova;
dma              1206 drivers/vfio/vfio_iommu_type1.c 		while (iova < dma->iova + dma->size) {
dma              1210 drivers/vfio/vfio_iommu_type1.c 			if (dma->iommu_mapped) {
dma              1224 drivers/vfio/vfio_iommu_type1.c 				while (i < dma->iova + dma->size &&
dma              1232 drivers/vfio/vfio_iommu_type1.c 				unsigned long vaddr = dma->vaddr +
dma              1233 drivers/vfio/vfio_iommu_type1.c 						     (iova - dma->iova);
dma              1234 drivers/vfio/vfio_iommu_type1.c 				size_t n = dma->iova + dma->size - iova;
dma              1237 drivers/vfio/vfio_iommu_type1.c 				npage = vfio_pin_pages_remote(dma, vaddr,
dma              1251 drivers/vfio/vfio_iommu_type1.c 					size, dma->prot | domain->prot);
dma              1257 drivers/vfio/vfio_iommu_type1.c 		dma->iommu_mapped = true;
dma              1868 drivers/vfio/vfio_iommu_type1.c 		struct vfio_dma *dma;
dma              1871 drivers/vfio/vfio_iommu_type1.c 		dma = rb_entry(n, struct vfio_dma, node);
dma              1872 drivers/vfio/vfio_iommu_type1.c 		unlocked += vfio_unmap_unpin(iommu, dma, false);
dma              1873 drivers/vfio/vfio_iommu_type1.c 		p = rb_first(&dma->pfn_list);
dma              1881 drivers/vfio/vfio_iommu_type1.c 		vfio_lock_acct(dma, locked - unlocked, true);
dma              1891 drivers/vfio/vfio_iommu_type1.c 		struct vfio_dma *dma;
dma              1893 drivers/vfio/vfio_iommu_type1.c 		dma = rb_entry(n, struct vfio_dma, node);
dma              1895 drivers/vfio/vfio_iommu_type1.c 		if (WARN_ON(!RB_EMPTY_ROOT(&dma->pfn_list)))
dma               813 drivers/video/fbdev/amba-clcd.c 	dma_addr_t dma;
dma               823 drivers/video/fbdev/amba-clcd.c 			&dma, GFP_KERNEL);
dma               827 drivers/video/fbdev/amba-clcd.c 	fb->fb.fix.smem_start = dma;
dma                72 drivers/video/fbdev/gbefb.c 	dma_addr_t dma;
dma               757 drivers/video/fbdev/gbefb.c 	SET_GBE_FIELD(FRM_CONTROL, FRM_TILE_PTR, val, gbe_tiles.dma >> 9);
dma              1162 drivers/video/fbdev/gbefb.c 				&gbe_tiles.dma, GFP_KERNEL);
dma                90 drivers/video/fbdev/pxafb.c static int setup_frame_dma(struct pxafb_info *fbi, int dma, int pal,
dma               542 drivers/video/fbdev/pxafb.c 	int dma = DMA_MAX + DMA_BASE;
dma               559 drivers/video/fbdev/pxafb.c 		lcd_writel(fbi, FBR1, fbi->fdadr[dma + 1] | 0x1);
dma               561 drivers/video/fbdev/pxafb.c 	lcd_writel(fbi, FBR0, fbi->fdadr[dma] | 0x1);
dma              1066 drivers/video/fbdev/pxafb.c static int setup_frame_dma(struct pxafb_info *fbi, int dma, int pal,
dma              1072 drivers/video/fbdev/pxafb.c 	if (dma < 0 || dma >= DMA_MAX * 2)
dma              1075 drivers/video/fbdev/pxafb.c 	dma_desc = &fbi->dma_buff->dma_desc[dma];
dma              1076 drivers/video/fbdev/pxafb.c 	dma_desc_off = offsetof(struct pxafb_dma_buff, dma_desc[dma]);
dma              1084 drivers/video/fbdev/pxafb.c 		fbi->fdadr[dma] = fbi->dma_buff_phys + dma_desc_off;
dma              1102 drivers/video/fbdev/pxafb.c 		fbi->fdadr[dma] = fbi->dma_buff_phys + dma_desc_off;
dma              1113 drivers/video/fbdev/pxafb.c 	int nbytes, dma, pal, bpp = var->bits_per_pixel;
dma              1116 drivers/video/fbdev/pxafb.c 	dma = DMA_BASE + (branch ? DMA_MAX : 0);
dma              1124 drivers/video/fbdev/pxafb.c 		setup_frame_dma(fbi, dma + 1, PAL_NONE, offset + nbytes, nbytes);
dma              1127 drivers/video/fbdev/pxafb.c 	setup_frame_dma(fbi, dma, pal, offset, nbytes);
dma              1506 drivers/vme/bridges/vme_ca91cx42.c 	dma_addr_t *dma)
dma              1513 drivers/vme/bridges/vme_ca91cx42.c 	return pci_alloc_consistent(pdev, size, dma);
dma              1517 drivers/vme/bridges/vme_ca91cx42.c 	void *vaddr, dma_addr_t dma)
dma              1524 drivers/vme/bridges/vme_ca91cx42.c 	pci_free_consistent(pdev, size, vaddr, dma);
dma              1004 drivers/vme/bridges/vme_fake.c 		dma_addr_t *dma)
dma              1009 drivers/vme/bridges/vme_fake.c 		*dma = fake_ptr_to_pci(alloc);
dma              1015 drivers/vme/bridges/vme_fake.c 		void *vaddr, dma_addr_t dma)
dma              2158 drivers/vme/bridges/vme_tsi148.c 	dma_addr_t *dma)
dma              2165 drivers/vme/bridges/vme_tsi148.c 	return pci_alloc_consistent(pdev, size, dma);
dma              2169 drivers/vme/bridges/vme_tsi148.c 	void *vaddr, dma_addr_t dma)
dma              2176 drivers/vme/bridges/vme_tsi148.c 	pci_free_consistent(pdev, size, vaddr, dma);
dma                87 drivers/vme/vme.c 	dma_addr_t *dma)
dma               113 drivers/vme/vme.c 	return bridge->alloc_consistent(bridge->parent, size, dma);
dma               127 drivers/vme/vme.c 	void *vaddr, dma_addr_t dma)
dma               153 drivers/vme/vme.c 	bridge->free_consistent(bridge->parent, size, vaddr, dma);
dma               173 drivers/vme/vme_bridge.h 		dma_addr_t *dma);
dma               175 drivers/vme/vme_bridge.h 		void *vaddr, dma_addr_t dma);
dma                65 drivers/xen/swiotlb-xen.c 	dma_addr_t dma = (dma_addr_t)bfn << XEN_PAGE_SHIFT;
dma                67 drivers/xen/swiotlb-xen.c 	dma |= paddr & ~XEN_PAGE_MASK;
dma                69 drivers/xen/swiotlb-xen.c 	return dma;
dma                75 drivers/xen/swiotlb-xen.c 	dma_addr_t dma = (dma_addr_t)xen_pfn << XEN_PAGE_SHIFT;
dma                76 drivers/xen/swiotlb-xen.c 	phys_addr_t paddr = dma;
dma               616 include/acpi/acrestyp.h 	struct acpi_resource_dma dma;
dma               333 include/drm/drm_device.h 	struct drm_device_dma *dma;
dma               257 include/linux/alcor_pci.h 	u8	dma;
dma                92 include/linux/async_tx.h 		struct dma_device *dma = chan->device;
dma                94 include/linux/async_tx.h 		dma->device_issue_pending(chan);
dma               142 include/linux/async_tx.h 	dma_addr_t dma;
dma               338 include/linux/compat.h 	unsigned char dma;
dma              1069 include/linux/dmaengine.h dma_set_maxpq(struct dma_device *dma, int maxpq, int has_pq_continue)
dma              1071 include/linux/dmaengine.h 	dma->max_pq = maxpq;
dma              1073 include/linux/dmaengine.h 		dma->max_pq |= DMA_HAS_PQ_CONTINUE;
dma              1088 include/linux/dmaengine.h static inline bool dma_dev_has_pq_continue(struct dma_device *dma)
dma              1090 include/linux/dmaengine.h 	return (dma->max_pq & DMA_HAS_PQ_CONTINUE) == DMA_HAS_PQ_CONTINUE;
dma              1093 include/linux/dmaengine.h static inline unsigned short dma_dev_to_maxpq(struct dma_device *dma)
dma              1095 include/linux/dmaengine.h 	return dma->max_pq & ~DMA_HAS_PQ_CONTINUE;
dma              1111 include/linux/dmaengine.h static inline int dma_maxpq(struct dma_device *dma, enum dma_ctrl_flags flags)
dma              1113 include/linux/dmaengine.h 	if (dma_dev_has_pq_continue(dma) || !dmaf_continue(flags))
dma              1114 include/linux/dmaengine.h 		return dma_dev_to_maxpq(dma);
dma              1116 include/linux/dmaengine.h 		return dma_dev_to_maxpq(dma) - 1;
dma              1118 include/linux/dmaengine.h 		return dma_dev_to_maxpq(dma) - 3;
dma               157 include/linux/genalloc.h 		dma_addr_t *dma);
dma               159 include/linux/genalloc.h 		dma_addr_t *dma, genpool_algo_t algo, void *data);
dma               161 include/linux/genalloc.h 		dma_addr_t *dma, int align);
dma               162 include/linux/genalloc.h extern void *gen_pool_dma_zalloc(struct gen_pool *pool, size_t size, dma_addr_t *dma);
dma               164 include/linux/genalloc.h 		dma_addr_t *dma, genpool_algo_t algo, void *data);
dma               166 include/linux/genalloc.h 		dma_addr_t *dma, int align);
dma               269 include/linux/hdlcdrv.h 				    unsigned int dma);
dma               558 include/linux/ide.h 	u8	dma;			/* atapi dma flag */
dma               771 include/linux/libata.h 	u32 dma;
dma               124 include/linux/mfd/stm32-timers.h 	struct stm32_timers_dma dma; /* Only to be used by the parent */
dma               265 include/linux/mlx4/cmd.h 	dma_addr_t		dma;
dma               677 include/linux/mlx4/device.h 	dma_addr_t		dma;
dma               280 include/linux/mlx5/driver.h 	dma_addr_t	dma;
dma               319 include/linux/mlx5/driver.h 	dma_addr_t	dma;
dma               730 include/linux/mlx5/driver.h 	dma_addr_t		dma;
dma              1870 include/linux/netdevice.h 	unsigned char		dma;
dma               196 include/linux/parport.h 	int dma;
dma               278 include/linux/parport.h struct parport *parport_register_port(unsigned long base, int irq, int dma,
dma               234 include/linux/parport_pc.h 					     int irq, int dma,
dma                29 include/linux/platform_data/s3c-hsotg.h 	enum dwc2_hsotg_dmamode	dma;
dma               331 include/linux/remoteproc.h 	dma_addr_t dma;
dma               602 include/linux/remoteproc.h 		     void *va, dma_addr_t dma, int len, u32 da,
dma               291 include/linux/rio.h 	struct dma_device	dma;
dma               518 include/linux/rio.h 	return container_of(ddev, struct rio_mport, dma);
dma               126 include/linux/serial_8250.h 	struct uart_8250_dma	*dma;
dma                75 include/linux/soc/ti/knav_qmss.h int knav_queue_push(void *qhandle, dma_addr_t dma,
dma                85 include/linux/soc/ti/knav_qmss.h 					dma_addr_t *dma, unsigned *dma_sz);
dma                86 include/linux/soc/ti/knav_qmss.h void *knav_pool_desc_unmap(void *ph, dma_addr_t dma, unsigned dma_sz);
dma                88 include/linux/soc/ti/knav_qmss.h void *knav_pool_desc_dma_to_virt(void *ph, dma_addr_t dma);
dma              1759 include/linux/usb.h 	gfp_t mem_flags, dma_addr_t *dma);
dma              1761 include/linux/usb.h 	void *addr, dma_addr_t dma);
dma               100 include/linux/usb/gadget.h 	dma_addr_t		dma;
dma               473 include/linux/usb/hcd.h 			    dma_addr_t dma, size_t size);
dma               499 include/linux/usb/hcd.h 	gfp_t mem_flags, dma_addr_t *dma);
dma               501 include/linux/usb/hcd.h 	void *addr, dma_addr_t dma);
dma                67 include/media/drv-intf/saa7146.h 	dma_addr_t	dma;
dma                71 include/media/videobuf-dma-sg.h 	struct videobuf_dmabuf  dma;
dma                84 include/media/videobuf-dma-sg.h int videobuf_dma_free(struct videobuf_dmabuf *dma);
dma                86 include/media/videobuf-dma-sg.h int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma);
dma                28 include/net/xdp_sock.h 	dma_addr_t dma;
dma               167 include/net/xdp_sock.h 	return umem->pages[addr >> PAGE_SHIFT].dma + (addr & ~PAGE_MASK);
dma               271 include/sound/core.h void snd_dma_program(unsigned long dma, unsigned long addr, unsigned int size, unsigned short mode);
dma               272 include/sound/core.h void snd_dma_disable(unsigned long dma);
dma               273 include/sound/core.h unsigned int snd_dma_pointer(unsigned long dma, unsigned int size);
dma              1325 include/sound/pcm.h static inline void snd_pcm_limit_isa_dma_size(int dma, size_t *max)
dma              1327 include/sound/pcm.h 	*max = dma < 4 ? 64 * 1024 : 128 * 1024;
dma               114 include/sound/wss.h 			  void *dma_private_data, int dma);
dma               116 include/sound/wss.h 			    void *dma_private_data, int dma);
dma               207 include/uapi/drm/radeon_drm.h 	} dma;
dma               271 include/uapi/drm/radeon_drm.h 	} dma;
dma                19 include/uapi/linux/hdlcdrv.h 	int dma;
dma               199 include/uapi/linux/if.h 	unsigned char dma;
dma                84 include/uapi/linux/if_link.h 	__u8	dma;
dma               145 include/uapi/rdma/rdma_user_rxe.h 	struct rxe_dma_info	dma;
dma               152 include/uapi/rdma/rdma_user_rxe.h 	struct rxe_dma_info	dma;
dma               500 include/video/imx-ipu-v3.h 	int dma[2];
dma               387 kernel/dma/coherent.c RESERVEDMEM_OF_DECLARE(dma, "shared-dma-pool", rmem_dma_setup);
dma               339 lib/genalloc.c void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size, dma_addr_t *dma)
dma               341 lib/genalloc.c 	return gen_pool_dma_alloc_algo(pool, size, dma, pool->algo, pool->data);
dma               361 lib/genalloc.c 		dma_addr_t *dma, genpool_algo_t algo, void *data)
dma               372 lib/genalloc.c 	if (dma)
dma               373 lib/genalloc.c 		*dma = gen_pool_virt_to_phys(pool, vaddr);
dma               394 lib/genalloc.c 		dma_addr_t *dma, int align)
dma               398 lib/genalloc.c 	return gen_pool_dma_alloc_algo(pool, size, dma,
dma               417 lib/genalloc.c void *gen_pool_dma_zalloc(struct gen_pool *pool, size_t size, dma_addr_t *dma)
dma               419 lib/genalloc.c 	return gen_pool_dma_zalloc_algo(pool, size, dma, pool->algo, pool->data);
dma               439 lib/genalloc.c 		dma_addr_t *dma, genpool_algo_t algo, void *data)
dma               441 lib/genalloc.c 	void *vaddr = gen_pool_dma_alloc_algo(pool, size, dma, algo, data);
dma               465 lib/genalloc.c 		dma_addr_t *dma, int align)
dma               469 lib/genalloc.c 	return gen_pool_dma_zalloc_algo(pool, size, dma,
dma                56 mm/dmapool.c   	dma_addr_t dma;
dma               229 mm/dmapool.c   					 &page->dma, mem_flags);
dma               251 mm/dmapool.c   	dma_addr_t dma = page->dma;
dma               256 mm/dmapool.c   	dma_free_coherent(pool->dev, pool->allocation, page->vaddr, dma);
dma               350 mm/dmapool.c   	*handle = offset + page->dma;
dma               388 mm/dmapool.c   static struct dma_page *pool_find_page(struct dma_pool *pool, dma_addr_t dma)
dma               393 mm/dmapool.c   		if (dma < page->dma)
dma               395 mm/dmapool.c   		if ((dma - page->dma) < pool->allocation)
dma               410 mm/dmapool.c   void dma_pool_free(struct dma_pool *pool, void *vaddr, dma_addr_t dma)
dma               417 mm/dmapool.c   	page = pool_find_page(pool, dma);
dma               423 mm/dmapool.c   				pool->name, vaddr, (unsigned long)dma);
dma               426 mm/dmapool.c   			       pool->name, vaddr, (unsigned long)dma);
dma               434 mm/dmapool.c   	if ((dma - page->dma) != offset) {
dma               439 mm/dmapool.c   				pool->name, vaddr, &dma);
dma               442 mm/dmapool.c   			       pool->name, vaddr, &dma);
dma               455 mm/dmapool.c   					pool->name, &dma);
dma               458 mm/dmapool.c   				       pool->name, &dma);
dma               145 net/core/dev_ioctl.c 		ifr->ifr_map.dma       = dev->dma;
dma               125 net/core/page_pool.c 	dma_addr_t dma;
dma               153 net/core/page_pool.c 	dma = dma_map_page_attrs(pool->p.dev, page, 0,
dma               156 net/core/page_pool.c 	if (dma_mapping_error(pool->p.dev, dma)) {
dma               160 net/core/page_pool.c 	page->dma_addr = dma;
dma               213 net/core/page_pool.c 	dma_addr_t dma;
dma               219 net/core/page_pool.c 	dma = page->dma_addr;
dma               221 net/core/page_pool.c 	dma_unmap_page_attrs(pool->p.dev, dma,
dma              1358 net/core/rtnetlink.c 	map.dma         = dev->dma;
dma              2450 net/core/rtnetlink.c 		k_map.dma = (unsigned char) u_map->dma;
dma              3314 net/socket.c   	err |= get_user(ifr.ifr_map.dma, &uifmap32->dma);
dma              3327 net/socket.c   		err |= put_user(ifr.ifr_map.dma, &uifmap32->dma);
dma               604 net/xdp/xsk.c  			(pgs[i].dma + PAGE_SIZE == pgs[i + 1].dma) :
dma                27 sound/core/isadma.c void snd_dma_program(unsigned long dma,
dma                34 sound/core/isadma.c 	disable_dma(dma);
dma                35 sound/core/isadma.c 	clear_dma_ff(dma);
dma                36 sound/core/isadma.c 	set_dma_mode(dma, mode);
dma                37 sound/core/isadma.c 	set_dma_addr(dma, addr);
dma                38 sound/core/isadma.c 	set_dma_count(dma, size);
dma                40 sound/core/isadma.c 		enable_dma(dma);
dma                51 sound/core/isadma.c void snd_dma_disable(unsigned long dma)
dma                56 sound/core/isadma.c 	clear_dma_ff(dma);
dma                57 sound/core/isadma.c 	disable_dma(dma);
dma                69 sound/core/isadma.c unsigned int snd_dma_pointer(unsigned long dma, unsigned int size)
dma                75 sound/core/isadma.c 	clear_dma_ff(dma);
dma                77 sound/core/isadma.c 		disable_dma(dma);
dma                78 sound/core/isadma.c 	result = get_dma_residue(dma);
dma                84 sound/core/isadma.c 	result1 = get_dma_residue(dma);
dma                86 sound/core/isadma.c 		enable_dma(dma);
dma                92 sound/core/isadma.c 		pr_err("ALSA: pointer (0x%x) for DMA #%ld is greater than transfer size (0x%x)\n", result, dma, size);
dma                86 sound/drivers/pcsp/pcsp.c 	pcsp_chip.dma = -1;
dma                58 sound/drivers/pcsp/pcsp.h 	unsigned short port, irq, dma;
dma                47 sound/isa/ad1848/ad1848.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                59 sound/isa/cmi8328.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                61 sound/isa/cmi8328.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                88 sound/isa/cmi8330.c module_param_hw_array(sbdma8, int, dma, NULL, 0444);
dma                90 sound/isa/cmi8330.c module_param_hw_array(sbdma16, int, dma, NULL, 0444);
dma                97 sound/isa/cmi8330.c module_param_hw_array(wssdma, int, dma, NULL, 0444);
dma                52 sound/isa/cs423x/cs4231.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                54 sound/isa/cs423x/cs4231.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma               100 sound/isa/cs423x/cs4236.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma               102 sound/isa/cs423x/cs4236.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                69 sound/isa/es1688/es1688.c module_param_hw_array(dma8, int, dma, NULL, 0444);
dma               185 sound/isa/es1688/es1688_lib.c 	int cfg, irq_bits, dma, dma_bits, tmp, tmp1;
dma               241 sound/isa/es1688/es1688_lib.c 		dma = chip->dma8;
dma               242 sound/isa/es1688/es1688_lib.c 		if (dma > 3 || dma == 2) {
dma               244 sound/isa/es1688/es1688_lib.c 				   "for ES1688 chip!!\n", chip->port, dma);
dma               251 sound/isa/es1688/es1688_lib.c 			dma_bits = dma;
dma               252 sound/isa/es1688/es1688_lib.c 			if (dma != 3)
dma              1994 sound/isa/es18xx.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma              1996 sound/isa/es18xx.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                55 sound/isa/galaxy/galaxy.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                57 sound/isa/galaxy/galaxy.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                50 sound/isa/gus/gusclassic.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                52 sound/isa/gus/gusclassic.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                66 sound/isa/gus/gusextreme.c module_param_hw_array(dma8, int, dma, NULL, 0444);
dma                68 sound/isa/gus/gusextreme.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                48 sound/isa/gus/gusmax.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                50 sound/isa/gus/gusmax.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                88 sound/isa/gus/interwave.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                90 sound/isa/gus/interwave.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                69 sound/isa/opl3sa2.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                71 sound/isa/opl3sa2.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma                69 sound/isa/opti9xx/miro.c module_param_hw(dma1, int, dma, 0444);
dma                71 sound/isa/opti9xx/miro.c module_param_hw(dma2, int, dma, 0444);
dma                89 sound/isa/opti9xx/opti92x-ad1848.c module_param_hw(dma1, int, dma, 0444);
dma                92 sound/isa/opti9xx/opti92x-ad1848.c module_param_hw(dma2, int, dma, 0444);
dma                61 sound/isa/sb/jazz16.c module_param_hw_array(dma8, int, dma, NULL, 0444);
dma                63 sound/isa/sb/jazz16.c module_param_hw_array(dma16, int, dma, NULL, 0444);
dma                99 sound/isa/sb/sb16.c module_param_hw_array(dma8, int, dma, NULL, 0444);
dma               101 sound/isa/sb/sb16.c module_param_hw_array(dma16, int, dma, NULL, 0444);
dma               253 sound/isa/sb/sb16_main.c 	unsigned int size, count, dma;
dma               264 sound/isa/sb/sb16_main.c 	dma = (chip->mode & SB_MODE_PLAYBACK_8) ? chip->dma8 : chip->dma16;
dma               265 sound/isa/sb/sb16_main.c 	snd_dma_program(dma, runtime->dma_addr, size, DMA_MODE_WRITE | DMA_AUTOINIT);
dma               323 sound/isa/sb/sb16_main.c 	unsigned int size, count, dma;
dma               333 sound/isa/sb/sb16_main.c 	dma = (chip->mode & SB_MODE_CAPTURE_8) ? chip->dma8 : chip->dma16;
dma               334 sound/isa/sb/sb16_main.c 	snd_dma_program(dma, runtime->dma_addr, size, DMA_MODE_READ | DMA_AUTOINIT);
dma               441 sound/isa/sb/sb16_main.c 	unsigned int dma;
dma               444 sound/isa/sb/sb16_main.c 	dma = (chip->mode & SB_MODE_PLAYBACK_8) ? chip->dma8 : chip->dma16;
dma               445 sound/isa/sb/sb16_main.c 	ptr = snd_dma_pointer(dma, chip->p_dma_size);
dma               452 sound/isa/sb/sb16_main.c 	unsigned int dma;
dma               455 sound/isa/sb/sb16_main.c 	dma = (chip->mode & SB_MODE_CAPTURE_8) ? chip->dma8 : chip->dma16;
dma               456 sound/isa/sb/sb16_main.c 	ptr = snd_dma_pointer(dma, chip->c_dma_size);
dma                39 sound/isa/sb/sb8.c module_param_hw_array(dma8, int, dma, NULL, 0444);
dma                98 sound/isa/sb/sb8_main.c 	int dma;
dma               137 sound/isa/sb/sb8_main.c 		dma = chip->dma16;
dma               141 sound/isa/sb/sb8_main.c 		dma = chip->dma8;
dma               159 sound/isa/sb/sb8_main.c 		snd_dma_program(dma, runtime->dma_addr, 1, DMA_MODE_WRITE);
dma               187 sound/isa/sb/sb8_main.c 	snd_dma_program(dma, runtime->dma_addr,
dma               248 sound/isa/sb/sb8_main.c 	int dma;
dma               288 sound/isa/sb/sb8_main.c 		dma = chip->dma16;
dma               292 sound/isa/sb/sb8_main.c 		dma = chip->dma8;
dma               324 sound/isa/sb/sb8_main.c 	snd_dma_program(dma, runtime->dma_addr,
dma               401 sound/isa/sb/sb8_main.c 	int dma;
dma               404 sound/isa/sb/sb8_main.c 		dma = chip->dma8;
dma               406 sound/isa/sb/sb8_main.c 		dma = chip->dma16;
dma               409 sound/isa/sb/sb8_main.c 	ptr = snd_dma_pointer(dma, chip->p_dma_size);
dma               417 sound/isa/sb/sb8_main.c 	int dma;
dma               420 sound/isa/sb/sb8_main.c 		dma = chip->dma8;
dma               422 sound/isa/sb/sb8_main.c 		dma = chip->dma16;
dma               425 sound/isa/sb/sb8_main.c 	ptr = snd_dma_pointer(dma, chip->c_dma_size);
dma                45 sound/isa/sc6000.c static int dma[SNDRV_CARDS] = SNDRV_DEFAULT_DMA;	/* 0, 1, 3 */
dma                64 sound/isa/sc6000.c module_param_hw_array(dma, int, dma, NULL, 0444);
dma                65 sound/isa/sc6000.c MODULE_PARM_DESC(dma, "DMA # for sc-6000 driver.");
dma               140 sound/isa/sc6000.c static unsigned char sc6000_dma_to_softcfg(int dma)
dma               144 sound/isa/sc6000.c 	switch (dma) {
dma               382 sound/isa/sc6000.c 			 sc6000_dma_to_softcfg(dma[dev]);
dma               516 sound/isa/sc6000.c 	if (dma[dev] != SNDRV_AUTO_DMA && !sc6000_dma_to_softcfg(dma[dev])) {
dma               517 sound/isa/sc6000.c 		printk(KERN_ERR PFX "invalid DMA %d\n", dma[dev]);
dma               541 sound/isa/sc6000.c 	int xdma = dma[dev];
dma                42 sound/isa/sscape.c static int dma[SNDRV_CARDS] = SNDRV_DEFAULT_DMA;
dma                64 sound/isa/sscape.c module_param_hw_array(dma, int, dma, NULL, 0444);
dma                65 sound/isa/sscape.c MODULE_PARM_DESC(dma, "DMA # for SoundScape driver.");
dma                67 sound/isa/sscape.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma               429 sound/isa/sscape.c 	struct snd_dma_buffer dma;
dma               433 sound/isa/sscape.c 	if (!get_dmabuf(s, &dma, PAGE_ALIGN(32 * 1024)))
dma               464 sound/isa/sscape.c 		len = min(size, dma.bytes);
dma               465 sound/isa/sscape.c 		memcpy(dma.area, data, len);
dma               469 sound/isa/sscape.c 		snd_dma_program(s->chip->dma1, dma.addr, len, DMA_MODE_WRITE);
dma               515 sound/isa/sscape.c 	free_dmabuf(&dma);
dma               964 sound/isa/sscape.c 	err = request_dma(dma[dev], "SoundScape");
dma               966 sound/isa/sscape.c 		snd_printk(KERN_ERR "sscape: can't grab DMA %d\n", dma[dev]);
dma              1001 sound/isa/sscape.c 			 name, sscape->io_base, irq[dev], dma[dev]);
dma              1045 sound/isa/sscape.c 			    | (dma[dev] << 4) | (irq_cfg << 1));
dma              1058 sound/isa/sscape.c 			    dma[dev], dma2[dev]);
dma              1123 sound/isa/sscape.c 	free_dma(dma[dev]);
dma              1143 sound/isa/sscape.c 	    dma[i] == SNDRV_AUTO_DMA) {
dma              1167 sound/isa/sscape.c 	dma[dev] &= 0x03;
dma              1270 sound/isa/sscape.c 	dma[idx] = pnp_dma(dev, 0) & 0x03;
dma              1272 sound/isa/sscape.c 		dma2[idx] = dma[idx];
dma                57 sound/isa/wavefront/wavefront.c module_param_hw_array(dma1, int, dma, NULL, 0444);
dma                59 sound/isa/wavefront/wavefront.c module_param_hw_array(dma2, int, dma, NULL, 0444);
dma               574 sound/parisc/harmony.c 	if (err > 0 && h->dma.type == SNDRV_DMA_TYPE_CONTINUOUS)
dma               637 sound/parisc/harmony.c 	h->dma.type = SNDRV_DMA_TYPE_DEV;
dma               638 sound/parisc/harmony.c 	h->dma.dev = &h->dev->dev;
dma               639 sound/parisc/harmony.c 	err = snd_dma_alloc_pages(h->dma.type,
dma               640 sound/parisc/harmony.c 				  h->dma.dev,
dma               649 sound/parisc/harmony.c 	err = snd_dma_alloc_pages(h->dma.type,
dma               650 sound/parisc/harmony.c 				  h->dma.dev,
dma               659 sound/parisc/harmony.c 	snd_pcm_lib_preallocate_pages_for_all(pcm, h->dma.type, h->dma.dev,
dma                34 sound/parisc/harmony.h         struct snd_dma_device dma; /* playback/capture */
dma               342 sound/pci/atiixp.c static int atiixp_build_dma_packets(struct atiixp *chip, struct atiixp_dma *dma,
dma               354 sound/pci/atiixp.c 	if (dma->desc_buf.area == NULL) {
dma               358 sound/pci/atiixp.c 					&dma->desc_buf) < 0)
dma               360 sound/pci/atiixp.c 		dma->period_bytes = dma->periods = 0; /* clear */
dma               363 sound/pci/atiixp.c 	if (dma->periods == periods && dma->period_bytes == period_bytes)
dma               368 sound/pci/atiixp.c 	writel(0, chip->remap_addr + dma->ops->llp_offset);
dma               369 sound/pci/atiixp.c 	dma->ops->enable_dma(chip, 0);
dma               370 sound/pci/atiixp.c 	dma->ops->enable_dma(chip, 1);
dma               375 sound/pci/atiixp.c 	desc_addr = (u32)dma->desc_buf.addr;
dma               378 sound/pci/atiixp.c 		desc = &((struct atiixp_dma_desc *)dma->desc_buf.area)[i];
dma               384 sound/pci/atiixp.c 			desc->next = cpu_to_le32((u32)dma->desc_buf.addr);
dma               390 sound/pci/atiixp.c 	writel((u32)dma->desc_buf.addr | ATI_REG_LINKPTR_EN,
dma               391 sound/pci/atiixp.c 	       chip->remap_addr + dma->ops->llp_offset);
dma               393 sound/pci/atiixp.c 	dma->period_bytes = period_bytes;
dma               394 sound/pci/atiixp.c 	dma->periods = periods;
dma               402 sound/pci/atiixp.c static void atiixp_clear_dma_packets(struct atiixp *chip, struct atiixp_dma *dma,
dma               405 sound/pci/atiixp.c 	if (dma->desc_buf.area) {
dma               406 sound/pci/atiixp.c 		writel(0, chip->remap_addr + dma->ops->llp_offset);
dma               407 sound/pci/atiixp.c 		snd_dma_free_pages(&dma->desc_buf);
dma               408 sound/pci/atiixp.c 		dma->desc_buf.area = NULL;
dma               651 sound/pci/atiixp.c 	struct atiixp_dma *dma = runtime->private_data;
dma               656 sound/pci/atiixp.c 		curptr = readl(chip->remap_addr + dma->ops->dt_cur);
dma               657 sound/pci/atiixp.c 		if (curptr < dma->buf_addr)
dma               659 sound/pci/atiixp.c 		curptr -= dma->buf_addr;
dma               660 sound/pci/atiixp.c 		if (curptr >= dma->buf_bytes)
dma               665 sound/pci/atiixp.c 		   readl(chip->remap_addr + dma->ops->dt_cur), dma->buf_addr);
dma               672 sound/pci/atiixp.c static void snd_atiixp_xrun_dma(struct atiixp *chip, struct atiixp_dma *dma)
dma               674 sound/pci/atiixp.c 	if (! dma->substream || ! dma->running)
dma               676 sound/pci/atiixp.c 	dev_dbg(chip->card->dev, "XRUN detected (DMA %d)\n", dma->ops->type);
dma               677 sound/pci/atiixp.c 	snd_pcm_stop_xrun(dma->substream);
dma               683 sound/pci/atiixp.c static void snd_atiixp_update_dma(struct atiixp *chip, struct atiixp_dma *dma)
dma               685 sound/pci/atiixp.c 	if (! dma->substream || ! dma->running)
dma               687 sound/pci/atiixp.c 	snd_pcm_period_elapsed(dma->substream);
dma               710 sound/pci/atiixp.c 	struct atiixp_dma *dma = substream->runtime->private_data;
dma               713 sound/pci/atiixp.c 	if (snd_BUG_ON(!dma->ops->enable_transfer ||
dma               714 sound/pci/atiixp.c 		       !dma->ops->flush_dma))
dma               722 sound/pci/atiixp.c 		if (dma->running && dma->suspended &&
dma               724 sound/pci/atiixp.c 			writel(dma->saved_curptr, chip->remap_addr +
dma               725 sound/pci/atiixp.c 			       dma->ops->dt_cur);
dma               726 sound/pci/atiixp.c 		dma->ops->enable_transfer(chip, 1);
dma               727 sound/pci/atiixp.c 		dma->running = 1;
dma               728 sound/pci/atiixp.c 		dma->suspended = 0;
dma               733 sound/pci/atiixp.c 		dma->suspended = cmd == SNDRV_PCM_TRIGGER_SUSPEND;
dma               734 sound/pci/atiixp.c 		if (dma->running && dma->suspended)
dma               735 sound/pci/atiixp.c 			dma->saved_curptr = readl(chip->remap_addr +
dma               736 sound/pci/atiixp.c 						  dma->ops->dt_cur);
dma               737 sound/pci/atiixp.c 		dma->ops->enable_transfer(chip, 0);
dma               738 sound/pci/atiixp.c 		dma->running = 0;
dma               747 sound/pci/atiixp.c 			dma->ops->flush_dma(chip);
dma               952 sound/pci/atiixp.c 	struct atiixp_dma *dma = substream->runtime->private_data;
dma               958 sound/pci/atiixp.c 	dma->buf_addr = substream->runtime->dma_addr;
dma               959 sound/pci/atiixp.c 	dma->buf_bytes = params_buffer_bytes(hw_params);
dma               961 sound/pci/atiixp.c 	err = atiixp_build_dma_packets(chip, dma, substream,
dma               967 sound/pci/atiixp.c 	if (dma->ac97_pcm_type >= 0) {
dma               968 sound/pci/atiixp.c 		struct ac97_pcm *pcm = chip->pcms[dma->ac97_pcm_type];
dma               972 sound/pci/atiixp.c 		if (dma->pcm_open_flag) {
dma               974 sound/pci/atiixp.c 			dma->pcm_open_flag = 0;
dma               980 sound/pci/atiixp.c 			dma->pcm_open_flag = 1;
dma               989 sound/pci/atiixp.c 	struct atiixp_dma *dma = substream->runtime->private_data;
dma               991 sound/pci/atiixp.c 	if (dma->pcm_open_flag) {
dma               992 sound/pci/atiixp.c 		struct ac97_pcm *pcm = chip->pcms[dma->ac97_pcm_type];
dma               994 sound/pci/atiixp.c 		dma->pcm_open_flag = 0;
dma               996 sound/pci/atiixp.c 	atiixp_clear_dma_packets(chip, dma, substream);
dma              1026 sound/pci/atiixp.c 			       struct atiixp_dma *dma, int pcm_type)
dma              1032 sound/pci/atiixp.c 	if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma))
dma              1035 sound/pci/atiixp.c 	if (dma->opened)
dma              1037 sound/pci/atiixp.c 	dma->substream = substream;
dma              1039 sound/pci/atiixp.c 	dma->ac97_pcm_type = pcm_type;
dma              1049 sound/pci/atiixp.c 	runtime->private_data = dma;
dma              1053 sound/pci/atiixp.c 	dma->ops->enable_dma(chip, 1);
dma              1055 sound/pci/atiixp.c 	dma->opened = 1;
dma              1061 sound/pci/atiixp.c 				struct atiixp_dma *dma)
dma              1065 sound/pci/atiixp.c 	if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma))
dma              1068 sound/pci/atiixp.c 	dma->ops->enable_dma(chip, 0);
dma              1070 sound/pci/atiixp.c 	dma->substream = NULL;
dma              1071 sound/pci/atiixp.c 	dma->opened = 0;
dma              1496 sound/pci/atiixp.c 			struct atiixp_dma *dma = &chip->dmas[i];
dma              1497 sound/pci/atiixp.c 			if (dma->substream && dma->suspended) {
dma              1498 sound/pci/atiixp.c 				dma->ops->enable_dma(chip, 1);
dma              1499 sound/pci/atiixp.c 				dma->substream->ops->prepare(dma->substream);
dma              1500 sound/pci/atiixp.c 				writel((u32)dma->desc_buf.addr | ATI_REG_LINKPTR_EN,
dma              1501 sound/pci/atiixp.c 				       chip->remap_addr + dma->ops->llp_offset);
dma               311 sound/pci/atiixp_modem.c 				    struct atiixp_dma *dma,
dma               323 sound/pci/atiixp_modem.c 	if (dma->desc_buf.area == NULL) {
dma               325 sound/pci/atiixp_modem.c 					ATI_DESC_LIST_SIZE, &dma->desc_buf) < 0)
dma               327 sound/pci/atiixp_modem.c 		dma->period_bytes = dma->periods = 0; /* clear */
dma               330 sound/pci/atiixp_modem.c 	if (dma->periods == periods && dma->period_bytes == period_bytes)
dma               335 sound/pci/atiixp_modem.c 	writel(0, chip->remap_addr + dma->ops->llp_offset);
dma               336 sound/pci/atiixp_modem.c 	dma->ops->enable_dma(chip, 0);
dma               337 sound/pci/atiixp_modem.c 	dma->ops->enable_dma(chip, 1);
dma               342 sound/pci/atiixp_modem.c 	desc_addr = (u32)dma->desc_buf.addr;
dma               345 sound/pci/atiixp_modem.c 		desc = &((struct atiixp_dma_desc *)dma->desc_buf.area)[i];
dma               351 sound/pci/atiixp_modem.c 			desc->next = cpu_to_le32((u32)dma->desc_buf.addr);
dma               357 sound/pci/atiixp_modem.c 	writel((u32)dma->desc_buf.addr | ATI_REG_LINKPTR_EN,
dma               358 sound/pci/atiixp_modem.c 	       chip->remap_addr + dma->ops->llp_offset);
dma               360 sound/pci/atiixp_modem.c 	dma->period_bytes = period_bytes;
dma               361 sound/pci/atiixp_modem.c 	dma->periods = periods;
dma               370 sound/pci/atiixp_modem.c 				     struct atiixp_dma *dma,
dma               373 sound/pci/atiixp_modem.c 	if (dma->desc_buf.area) {
dma               374 sound/pci/atiixp_modem.c 		writel(0, chip->remap_addr + dma->ops->llp_offset);
dma               375 sound/pci/atiixp_modem.c 		snd_dma_free_pages(&dma->desc_buf);
dma               376 sound/pci/atiixp_modem.c 		dma->desc_buf.area = NULL;
dma               600 sound/pci/atiixp_modem.c 	struct atiixp_dma *dma = runtime->private_data;
dma               605 sound/pci/atiixp_modem.c 		curptr = readl(chip->remap_addr + dma->ops->dt_cur);
dma               606 sound/pci/atiixp_modem.c 		if (curptr < dma->buf_addr)
dma               608 sound/pci/atiixp_modem.c 		curptr -= dma->buf_addr;
dma               609 sound/pci/atiixp_modem.c 		if (curptr >= dma->buf_bytes)
dma               614 sound/pci/atiixp_modem.c 		   readl(chip->remap_addr + dma->ops->dt_cur), dma->buf_addr);
dma               622 sound/pci/atiixp_modem.c 				struct atiixp_dma *dma)
dma               624 sound/pci/atiixp_modem.c 	if (! dma->substream || ! dma->running)
dma               626 sound/pci/atiixp_modem.c 	dev_dbg(chip->card->dev, "XRUN detected (DMA %d)\n", dma->ops->type);
dma               627 sound/pci/atiixp_modem.c 	snd_pcm_stop_xrun(dma->substream);
dma               634 sound/pci/atiixp_modem.c 				  struct atiixp_dma *dma)
dma               636 sound/pci/atiixp_modem.c 	if (! dma->substream || ! dma->running)
dma               638 sound/pci/atiixp_modem.c 	snd_pcm_period_elapsed(dma->substream);
dma               660 sound/pci/atiixp_modem.c 	struct atiixp_dma *dma = substream->runtime->private_data;
dma               663 sound/pci/atiixp_modem.c 	if (snd_BUG_ON(!dma->ops->enable_transfer ||
dma               664 sound/pci/atiixp_modem.c 		       !dma->ops->flush_dma))
dma               670 sound/pci/atiixp_modem.c 		dma->ops->enable_transfer(chip, 1);
dma               671 sound/pci/atiixp_modem.c 		dma->running = 1;
dma               674 sound/pci/atiixp_modem.c 		dma->ops->enable_transfer(chip, 0);
dma               675 sound/pci/atiixp_modem.c 		dma->running = 0;
dma               684 sound/pci/atiixp_modem.c 		dma->ops->flush_dma(chip);
dma               782 sound/pci/atiixp_modem.c 	struct atiixp_dma *dma = substream->runtime->private_data;
dma               789 sound/pci/atiixp_modem.c 	dma->buf_addr = substream->runtime->dma_addr;
dma               790 sound/pci/atiixp_modem.c 	dma->buf_bytes = params_buffer_bytes(hw_params);
dma               792 sound/pci/atiixp_modem.c 	err = atiixp_build_dma_packets(chip, dma, substream,
dma               812 sound/pci/atiixp_modem.c 	struct atiixp_dma *dma = substream->runtime->private_data;
dma               814 sound/pci/atiixp_modem.c 	atiixp_clear_dma_packets(chip, dma, substream);
dma               844 sound/pci/atiixp_modem.c 			       struct atiixp_dma *dma, int pcm_type)
dma               856 sound/pci/atiixp_modem.c 	if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma))
dma               859 sound/pci/atiixp_modem.c 	if (dma->opened)
dma               861 sound/pci/atiixp_modem.c 	dma->substream = substream;
dma               863 sound/pci/atiixp_modem.c 	dma->ac97_pcm_type = pcm_type;
dma               871 sound/pci/atiixp_modem.c 	runtime->private_data = dma;
dma               875 sound/pci/atiixp_modem.c 	dma->ops->enable_dma(chip, 1);
dma               877 sound/pci/atiixp_modem.c 	dma->opened = 1;
dma               883 sound/pci/atiixp_modem.c 				struct atiixp_dma *dma)
dma               887 sound/pci/atiixp_modem.c 	if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma))
dma               890 sound/pci/atiixp_modem.c 	dma->ops->enable_dma(chip, 0);
dma               892 sound/pci/atiixp_modem.c 	dma->substream = NULL;
dma               893 sound/pci/atiixp_modem.c 	dma->opened = 0;
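
The atiixp_modem.c entries above build a circular descriptor list once per (periods, period_bytes) pair: one descriptor per period, each linking to the next, the last looping back to the first, after which the list's bus address is written to the chip's link pointer. Below is a minimal sketch of that ring fill, assuming a 32-bit addressable buffer; struct my_desc is an illustrative layout, not the chip's real descriptor format.

#include <linux/kernel.h>
#include <sound/memalloc.h>

struct my_desc {
	__le32 addr;	/* bus address of this period's audio data */
	__le32 bytes;	/* period length in bytes */
	__le32 next;	/* bus address of the next descriptor */
};

static void my_build_ring(struct snd_dma_buffer *descbuf, dma_addr_t audio,
			  unsigned int periods, unsigned int period_bytes)
{
	struct my_desc *desc = (struct my_desc *)descbuf->area;
	u32 base = (u32)descbuf->addr;	/* assumed to fit in 32 bits */
	unsigned int i;

	for (i = 0; i < periods; i++) {
		desc[i].addr  = cpu_to_le32((u32)(audio + i * period_bytes));
		desc[i].bytes = cpu_to_le32(period_bytes);
		/* the last descriptor wraps back to the first */
		desc[i].next  = cpu_to_le32(i == periods - 1 ?
				base : base + (i + 1) * sizeof(*desc));
	}
}
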
dma                99 sound/pci/au88x0/au88x0.h 	int dma;
dma               116 sound/pci/au88x0/au88x0.h 	int dma;		/* Hardware DMA index. */
dma               236 sound/pci/au88x0/au88x0.h static int vortex_adb_allocroute(vortex_t * vortex, int dma, int nr_ch,
dma               241 sound/pci/au88x0/au88x0.h static int vortex_wt_allocroute(vortex_t * vortex, int dma, int nr_ch);
dma              1072 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1075 sound/pci/au88x0/au88x0_core.c 		dma->dma_ctrl);
dma              1080 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1084 sound/pci/au88x0/au88x0_core.c 	dma->period_real = dma->period_virt = sb;
dma              1091 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1093 sound/pci/au88x0/au88x0_core.c 	dma->period_bytes = psize;
dma              1094 sound/pci/au88x0/au88x0_core.c 	dma->nr_periods = count;
dma              1096 sound/pci/au88x0/au88x0_core.c 	dma->cfg0 = 0;
dma              1097 sound/pci/au88x0/au88x0_core.c 	dma->cfg1 = 0;
dma              1102 sound/pci/au88x0/au88x0_core.c 		dma->cfg1 |= 0x88000000 | 0x44000000 | 0x30000000 | (psize - 1);
dma              1105 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, psize * 3));
dma              1109 sound/pci/au88x0/au88x0_core.c 		dma->cfg0 |= 0x12000000;
dma              1110 sound/pci/au88x0/au88x0_core.c 		dma->cfg1 |= 0x80000000 | 0x40000000 | ((psize - 1) << 0xc);
dma              1113 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, psize * 2));
dma              1117 sound/pci/au88x0/au88x0_core.c 		dma->cfg0 |= 0x88000000 | 0x44000000 | 0x10000000 | (psize - 1);
dma              1120 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, psize));
dma              1124 sound/pci/au88x0/au88x0_core.c 		dma->cfg0 |= 0x80000000 | 0x40000000 | ((psize - 1) << 0xc);
dma              1127 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, 0));
dma              1134 sound/pci/au88x0/au88x0_core.c 	hwwrite(vortex->mmio, VORTEX_ADBDMA_BUFCFG0 + (adbdma << 3), dma->cfg0);
dma              1135 sound/pci/au88x0/au88x0_core.c 	hwwrite(vortex->mmio, VORTEX_ADBDMA_BUFCFG1 + (adbdma << 3), dma->cfg1);
dma              1145 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1147 sound/pci/au88x0/au88x0_core.c 	dma->dma_unknown = stereo;
dma              1148 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1149 sound/pci/au88x0/au88x0_core.c 	    ((offset & OFFSET_MASK) | (dma->dma_ctrl & ~OFFSET_MASK));
dma              1151 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1152 sound/pci/au88x0/au88x0_core.c 	    (dma->dma_ctrl & ~IE_MASK) | ((ie << IE_SHIFT) & IE_MASK);
dma              1154 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1155 sound/pci/au88x0/au88x0_core.c 	    (dma->dma_ctrl & ~DIR_MASK) | ((dir << DIR_SHIFT) & DIR_MASK);
dma              1156 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1157 sound/pci/au88x0/au88x0_core.c 	    (dma->dma_ctrl & ~FMT_MASK) | ((fmt << FMT_SHIFT) & FMT_MASK);
dma              1160 sound/pci/au88x0/au88x0_core.c 		dma->dma_ctrl);
dma              1166 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1172 sound/pci/au88x0/au88x0_core.c 	if (dma->nr_periods >= 4)
dma              1173 sound/pci/au88x0/au88x0_core.c 		delta = (page - dma->period_real) & 3;
dma              1175 sound/pci/au88x0/au88x0_core.c 		delta = (page - dma->period_real);
dma              1177 sound/pci/au88x0/au88x0_core.c 			delta += dma->nr_periods;
dma              1183 sound/pci/au88x0/au88x0_core.c 	if (dma->nr_periods > 4) {
dma              1186 sound/pci/au88x0/au88x0_core.c 			p = dma->period_virt + i + 4;
dma              1187 sound/pci/au88x0/au88x0_core.c 			if (p >= dma->nr_periods)
dma              1188 sound/pci/au88x0/au88x0_core.c 				p -= dma->nr_periods;
dma              1190 sound/pci/au88x0/au88x0_core.c 			pp = dma->period_real + i;
dma              1196 sound/pci/au88x0/au88x0_core.c 				snd_pcm_sgbuf_get_addr(dma->substream,
dma              1197 sound/pci/au88x0/au88x0_core.c 				dma->period_bytes * p));
dma              1203 sound/pci/au88x0/au88x0_core.c 	dma->period_virt += delta;
dma              1204 sound/pci/au88x0/au88x0_core.c 	dma->period_real = page;
dma              1205 sound/pci/au88x0/au88x0_core.c 	if (dma->period_virt >= dma->nr_periods)
dma              1206 sound/pci/au88x0/au88x0_core.c 		dma->period_virt -= dma->nr_periods;
dma              1210 sound/pci/au88x0/au88x0_core.c 			 adbdma, dma->period_virt, dma->period_real, delta);
dma              1217 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1221 sound/pci/au88x0/au88x0_core.c 	for (i=0 ; i < 4 && i < dma->nr_periods; i++) {
dma              1223 sound/pci/au88x0/au88x0_core.c 		p = dma->period_virt + i;
dma              1224 sound/pci/au88x0/au88x0_core.c 		if (p >= dma->nr_periods)
dma              1225 sound/pci/au88x0/au88x0_core.c 			p -= dma->nr_periods;
dma              1227 sound/pci/au88x0/au88x0_core.c 		pp = dma->period_real + i;
dma              1228 sound/pci/au88x0/au88x0_core.c 		if (dma->nr_periods < 4) {
dma              1229 sound/pci/au88x0/au88x0_core.c 			if (pp >= dma->nr_periods)
dma              1230 sound/pci/au88x0/au88x0_core.c 				pp -= dma->nr_periods;
dma              1238 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream,
dma              1239 sound/pci/au88x0/au88x0_core.c 					       dma->period_bytes * p));
dma              1247 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1252 sound/pci/au88x0/au88x0_core.c 	if (dma->nr_periods >= 4)
dma              1253 sound/pci/au88x0/au88x0_core.c 		delta = (page - dma->period_real) & 3;
dma              1255 sound/pci/au88x0/au88x0_core.c 		delta = (page - dma->period_real);
dma              1257 sound/pci/au88x0/au88x0_core.c 			delta += dma->nr_periods;
dma              1259 sound/pci/au88x0/au88x0_core.c 	return (dma->period_virt + delta) * dma->period_bytes
dma              1260 sound/pci/au88x0/au88x0_core.c 		+ (temp & (dma->period_bytes - 1));
dma              1266 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1268 sound/pci/au88x0/au88x0_core.c 	switch (dma->fifo_status) {
dma              1271 sound/pci/au88x0/au88x0_core.c 					dma->fifo_enabled ? 1 : 0);
dma              1276 sound/pci/au88x0/au88x0_core.c 			dma->dma_ctrl);
dma              1277 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1279 sound/pci/au88x0/au88x0_core.c 				       dma->fifo_enabled ? 1 : 0, 0);
dma              1282 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1284 sound/pci/au88x0/au88x0_core.c 				       dma->fifo_enabled ? 1 : 0, 0);
dma              1287 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_START;
dma              1292 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1295 sound/pci/au88x0/au88x0_core.c 	switch (dma->fifo_status) {
dma              1298 sound/pci/au88x0/au88x0_core.c 			dma->dma_ctrl);
dma              1299 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1301 sound/pci/au88x0/au88x0_core.c 				       dma->fifo_enabled ? 1 : 0, 0);
dma              1304 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1306 sound/pci/au88x0/au88x0_core.c 				       dma->fifo_enabled ? 1 : 0, 0);
dma              1309 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_START;
dma              1314 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1317 sound/pci/au88x0/au88x0_core.c 	switch (dma->fifo_status) {
dma              1319 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1324 sound/pci/au88x0/au88x0_core.c 			dma->dma_ctrl);
dma              1325 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1329 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_PAUSE;
dma              1334 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_adb[adbdma];
dma              1337 sound/pci/au88x0/au88x0_core.c 	if (dma->fifo_status == FIFO_START)
dma              1338 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown,
dma              1340 sound/pci/au88x0/au88x0_core.c 	else if (dma->fifo_status == FIFO_STOP)
dma              1342 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_STOP;
dma              1343 sound/pci/au88x0/au88x0_core.c 	dma->fifo_enabled = 0;
dma              1352 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1354 sound/pci/au88x0/au88x0_core.c 	hwwrite(vortex->mmio, VORTEX_WTDMA_CTRL + (wtdma << 2), dma->dma_ctrl);
dma              1359 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1363 sound/pci/au88x0/au88x0_core.c 	dma->period_real = dma->period_virt = sb;
dma              1370 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1372 sound/pci/au88x0/au88x0_core.c 	dma->period_bytes = psize;
dma              1373 sound/pci/au88x0/au88x0_core.c 	dma->nr_periods = count;
dma              1375 sound/pci/au88x0/au88x0_core.c 	dma->cfg0 = 0;
dma              1376 sound/pci/au88x0/au88x0_core.c 	dma->cfg1 = 0;
dma              1381 sound/pci/au88x0/au88x0_core.c 		dma->cfg1 |= 0x88000000 | 0x44000000 | 0x30000000 | (psize-1);
dma              1383 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, psize * 3));
dma              1387 sound/pci/au88x0/au88x0_core.c 		dma->cfg0 |= 0x12000000;
dma              1388 sound/pci/au88x0/au88x0_core.c 		dma->cfg1 |= 0x80000000 | 0x40000000 | ((psize-1) << 0xc);
dma              1390 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, psize * 2));
dma              1394 sound/pci/au88x0/au88x0_core.c 		dma->cfg0 |= 0x88000000 | 0x44000000 | 0x10000000 | (psize-1);
dma              1396 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, psize));
dma              1400 sound/pci/au88x0/au88x0_core.c 		dma->cfg0 |= 0x80000000 | 0x40000000 | ((psize-1) << 0xc);
dma              1402 sound/pci/au88x0/au88x0_core.c 			snd_pcm_sgbuf_get_addr(dma->substream, 0));
dma              1405 sound/pci/au88x0/au88x0_core.c 	hwwrite(vortex->mmio, VORTEX_WTDMA_BUFCFG0 + (wtdma << 3), dma->cfg0);
dma              1406 sound/pci/au88x0/au88x0_core.c 	hwwrite(vortex->mmio, VORTEX_WTDMA_BUFCFG1 + (wtdma << 3), dma->cfg1);
dma              1416 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1419 sound/pci/au88x0/au88x0_core.c 	dma->dma_unknown = d;
dma              1420 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl = 0;
dma              1421 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1422 sound/pci/au88x0/au88x0_core.c 	    ((offset & OFFSET_MASK) | (dma->dma_ctrl & ~OFFSET_MASK));
dma              1424 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1425 sound/pci/au88x0/au88x0_core.c 	    (dma->dma_ctrl & ~IE_MASK) | ((ie << IE_SHIFT) & IE_MASK);
dma              1427 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl |= (1 << DIR_SHIFT);
dma              1429 sound/pci/au88x0/au88x0_core.c 	dma->dma_ctrl =
dma              1430 sound/pci/au88x0/au88x0_core.c 	    (dma->dma_ctrl & FMT_MASK) | ((fmt << FMT_SHIFT) & FMT_MASK);
dma              1432 sound/pci/au88x0/au88x0_core.c 	hwwrite(vortex->mmio, VORTEX_WTDMA_CTRL + (wtdma << 2), dma->dma_ctrl);
dma              1437 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1443 sound/pci/au88x0/au88x0_core.c 	if (dma->nr_periods >= 4)
dma              1444 sound/pci/au88x0/au88x0_core.c 		delta = (page - dma->period_real) & 3;
dma              1446 sound/pci/au88x0/au88x0_core.c 		delta = (page - dma->period_real);
dma              1448 sound/pci/au88x0/au88x0_core.c 			delta += dma->nr_periods;
dma              1454 sound/pci/au88x0/au88x0_core.c 	if (dma->nr_periods > 4) {
dma              1457 sound/pci/au88x0/au88x0_core.c 			p = dma->period_virt + i + 4;
dma              1458 sound/pci/au88x0/au88x0_core.c 			if (p >= dma->nr_periods)
dma              1459 sound/pci/au88x0/au88x0_core.c 				p -= dma->nr_periods;
dma              1461 sound/pci/au88x0/au88x0_core.c 			pp = dma->period_real + i;
dma              1467 sound/pci/au88x0/au88x0_core.c 				snd_pcm_sgbuf_get_addr(dma->substream,
dma              1468 sound/pci/au88x0/au88x0_core.c 						       dma->period_bytes * p));
dma              1474 sound/pci/au88x0/au88x0_core.c 	dma->period_virt += delta;
dma              1475 sound/pci/au88x0/au88x0_core.c 	if (dma->period_virt >= dma->nr_periods)
dma              1476 sound/pci/au88x0/au88x0_core.c 		dma->period_virt -= dma->nr_periods;
dma              1477 sound/pci/au88x0/au88x0_core.c 	dma->period_real = page;
dma              1481 sound/pci/au88x0/au88x0_core.c 			 dma->period_virt, delta);
dma              1504 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1508 sound/pci/au88x0/au88x0_core.c 	temp = (dma->period_virt * dma->period_bytes) + (temp & (dma->period_bytes - 1));
dma              1514 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1517 sound/pci/au88x0/au88x0_core.c 	switch (dma->fifo_status) {
dma              1520 sound/pci/au88x0/au88x0_core.c 				       dma->fifo_enabled ? 1 : 0);
dma              1525 sound/pci/au88x0/au88x0_core.c 			dma->dma_ctrl);
dma              1526 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1528 sound/pci/au88x0/au88x0_core.c 				      dma->fifo_enabled ? 1 : 0, 0);
dma              1531 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1533 sound/pci/au88x0/au88x0_core.c 				      dma->fifo_enabled ? 1 : 0, 0);
dma              1536 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_START;
dma              1541 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1544 sound/pci/au88x0/au88x0_core.c 	switch (dma->fifo_status) {
dma              1547 sound/pci/au88x0/au88x0_core.c 			dma->dma_ctrl);
dma              1548 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1550 sound/pci/au88x0/au88x0_core.c 				      dma->fifo_enabled ? 1 : 0, 0);
dma              1553 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1555 sound/pci/au88x0/au88x0_core.c 				      dma->fifo_enabled ? 1 : 0, 0);
dma              1558 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_START;
dma              1563 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1566 sound/pci/au88x0/au88x0_core.c 	switch (dma->fifo_status) {
dma              1568 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1573 sound/pci/au88x0/au88x0_core.c 			dma->dma_ctrl);
dma              1574 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1578 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_PAUSE;
dma              1583 sound/pci/au88x0/au88x0_core.c 	stream_t *dma = &vortex->dma_wt[wtdma];
dma              1586 sound/pci/au88x0/au88x0_core.c 	if (dma->fifo_status == FIFO_START)
dma              1587 sound/pci/au88x0/au88x0_core.c 		vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown,
dma              1589 sound/pci/au88x0/au88x0_core.c 	else if (dma->fifo_status == FIFO_STOP)
dma              1591 sound/pci/au88x0/au88x0_core.c 	dma->fifo_status = FIFO_STOP;
dma              1592 sound/pci/au88x0/au88x0_core.c 	dma->fifo_enabled = 0;
dma              2109 sound/pci/au88x0/au88x0_core.c vortex_adb_allocroute(vortex_t *vortex, int dma, int nr_ch, int dir,
dma              2116 sound/pci/au88x0/au88x0_core.c 	if (dma >= 0) {
dma              2119 sound/pci/au88x0/au88x0_core.c 				      vortex->dma_adb[dma].resources, en,
dma              2123 sound/pci/au88x0/au88x0_core.c 		if ((dma =
dma              2129 sound/pci/au88x0/au88x0_core.c 	stream = &vortex->dma_adb[dma];
dma              2130 sound/pci/au88x0/au88x0_core.c 	stream->dma = dma;
dma              2194 sound/pci/au88x0/au88x0_core.c 							     dma,
dma              2216 sound/pci/au88x0/au88x0_core.c 								 dma,
dma              2225 sound/pci/au88x0/au88x0_core.c 					     ADB_DMA(stream->dma),
dma              2248 sound/pci/au88x0/au88x0_core.c 				p->dma = dma;
dma              2259 sound/pci/au88x0/au88x0_core.c 					     ADB_DMA(stream->dma),
dma              2308 sound/pci/au88x0/au88x0_core.c 						     src[0], dma);
dma              2316 sound/pci/au88x0/au88x0_core.c 							 src[1], dma);
dma              2319 sound/pci/au88x0/au88x0_core.c 	vortex->dma_adb[dma].nr_ch = nr_ch;
dma              2340 sound/pci/au88x0/au88x0_core.c 	return dma;
dma               228 sound/pci/au88x0/au88x0_pcm.c 		int dma, type = VORTEX_PCM_TYPE(substream->pcm);
dma               231 sound/pci/au88x0/au88x0_pcm.c 			vortex_adb_allocroute(chip, stream->dma,
dma               236 sound/pci/au88x0/au88x0_pcm.c 		dma =
dma               241 sound/pci/au88x0/au88x0_pcm.c 		if (dma < 0) {
dma               243 sound/pci/au88x0/au88x0_pcm.c 			return dma;
dma               245 sound/pci/au88x0/au88x0_pcm.c 		stream = substream->runtime->private_data = &chip->dma_adb[dma];
dma               248 sound/pci/au88x0/au88x0_pcm.c 		vortex_adbdma_setbuffers(chip, dma,
dma               265 sound/pci/au88x0/au88x0_pcm.c 		stream->dma = substream->number;
dma               292 sound/pci/au88x0/au88x0_pcm.c 			vortex_adb_allocroute(chip, stream->dma,
dma               301 sound/pci/au88x0/au88x0_pcm.c 			vortex_wt_allocroute(chip, stream->dma, 0);
dma               316 sound/pci/au88x0/au88x0_pcm.c 	int dma = stream->dma, fmt, dir;
dma               326 sound/pci/au88x0/au88x0_pcm.c 		vortex_adbdma_setmode(chip, dma, 1, dir, fmt,
dma               328 sound/pci/au88x0/au88x0_pcm.c 		vortex_adbdma_setstartbuffer(chip, dma, 0);
dma               330 sound/pci/au88x0/au88x0_pcm.c 			vortex_adb_setsrc(chip, dma, runtime->rate, dir);
dma               334 sound/pci/au88x0/au88x0_pcm.c 		vortex_wtdma_setmode(chip, dma, 1, fmt, 0, 0);
dma               336 sound/pci/au88x0/au88x0_pcm.c 		vortex_wtdma_setstartbuffer(chip, dma, 0);
dma               348 sound/pci/au88x0/au88x0_pcm.c 	int dma = stream->dma;
dma               357 sound/pci/au88x0/au88x0_pcm.c 			vortex_adbdma_resetup(chip, dma);
dma               358 sound/pci/au88x0/au88x0_pcm.c 			vortex_adbdma_startfifo(chip, dma);
dma               362 sound/pci/au88x0/au88x0_pcm.c 			dev_info(chip->card->dev, "wt start %d\n", dma);
dma               363 sound/pci/au88x0/au88x0_pcm.c 			vortex_wtdma_startfifo(chip, dma);
dma               372 sound/pci/au88x0/au88x0_pcm.c 			vortex_adbdma_stopfifo(chip, dma);
dma               375 sound/pci/au88x0/au88x0_pcm.c 			dev_info(chip->card->dev, "wt stop %d\n", dma);
dma               376 sound/pci/au88x0/au88x0_pcm.c 			vortex_wtdma_stopfifo(chip, dma);
dma               383 sound/pci/au88x0/au88x0_pcm.c 			vortex_adbdma_pausefifo(chip, dma);
dma               386 sound/pci/au88x0/au88x0_pcm.c 			vortex_wtdma_pausefifo(chip, dma);
dma               392 sound/pci/au88x0/au88x0_pcm.c 			vortex_adbdma_resumefifo(chip, dma);
dma               395 sound/pci/au88x0/au88x0_pcm.c 			vortex_wtdma_resumefifo(chip, dma);
dma               411 sound/pci/au88x0/au88x0_pcm.c 	int dma = stream->dma;
dma               416 sound/pci/au88x0/au88x0_pcm.c 		current_ptr = vortex_adbdma_getlinearpos(chip, dma);
dma               419 sound/pci/au88x0/au88x0_pcm.c 		current_ptr = vortex_wtdma_getlinearpos(chip, dma);
dma               568 sound/pci/au88x0/au88x0_pcm.c 				switch (vortex->dma_adb[p->dma].nr_ch) {
dma               679 sound/pci/au88x0/au88x0_pcm.c 			chip->pcm_vol[i].dma = -1;
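
The au88x0 entries above keep two period counters per stream: period_real tracks the hardware page (the engine exposes only four pages), while period_virt tracks the position in the full ring of nr_periods; the linear pointer is then rebuilt from the virtual period plus the offset inside the current period. The helpers below restate that arithmetic and assume, as the driver does, that period_bytes is a power of two.

#include <linux/types.h>

static int vortex_style_delta(int page, int period_real, int nr_periods)
{
	int delta;

	if (nr_periods >= 4) {
		/* only four hardware pages are visible: wrap modulo 4 */
		delta = (page - period_real) & 3;
	} else {
		delta = page - period_real;
		if (delta < 0)
			delta += nr_periods;
	}
	return delta;
}

static u32 vortex_style_linearpos(int period_virt, int delta,
				  u32 period_bytes, u32 hw_count)
{
	/* whole periods consumed plus the offset inside the current one */
	return (period_virt + delta) * period_bytes +
	       (hw_count & (period_bytes - 1));
}
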
dma               458 sound/pci/cs4281.c 	struct cs4281_dma dma[4];
dma               655 sound/pci/cs4281.c 	struct cs4281_dma *dma = substream->runtime->private_data;
dma               661 sound/pci/cs4281.c 		dma->valDCR |= BA0_DCR_MSK;
dma               662 sound/pci/cs4281.c 		dma->valFCR |= BA0_FCR_FEN;
dma               665 sound/pci/cs4281.c 		dma->valDCR &= ~BA0_DCR_MSK;
dma               666 sound/pci/cs4281.c 		dma->valFCR &= ~BA0_FCR_FEN;
dma               670 sound/pci/cs4281.c 		snd_cs4281_pokeBA0(chip, dma->regDMR, dma->valDMR & ~BA0_DMR_DMA);
dma               671 sound/pci/cs4281.c 		dma->valDMR |= BA0_DMR_DMA;
dma               672 sound/pci/cs4281.c 		dma->valDCR &= ~BA0_DCR_MSK;
dma               673 sound/pci/cs4281.c 		dma->valFCR |= BA0_FCR_FEN;
dma               677 sound/pci/cs4281.c 		dma->valDMR &= ~(BA0_DMR_DMA|BA0_DMR_POLL);
dma               678 sound/pci/cs4281.c 		dma->valDCR |= BA0_DCR_MSK;
dma               679 sound/pci/cs4281.c 		dma->valFCR &= ~BA0_FCR_FEN;
dma               681 sound/pci/cs4281.c 		if (dma->regFCR != BA0_FCR0)
dma               682 sound/pci/cs4281.c 			dma->valFCR &= ~BA0_FCR_FEN;
dma               688 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regDMR, dma->valDMR);
dma               689 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regFCR, dma->valFCR);
dma               690 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regDCR, dma->valDCR);
dma               718 sound/pci/cs4281.c static void snd_cs4281_mode(struct cs4281 *chip, struct cs4281_dma *dma,
dma               724 sound/pci/cs4281.c 	dma->valDMR = BA0_DMR_TYPE_SINGLE | BA0_DMR_AUTO |
dma               727 sound/pci/cs4281.c 		dma->valDMR |= BA0_DMR_MONO;
dma               729 sound/pci/cs4281.c 		dma->valDMR |= BA0_DMR_USIGN;
dma               731 sound/pci/cs4281.c 		dma->valDMR |= BA0_DMR_BEND;
dma               733 sound/pci/cs4281.c 	case 8: dma->valDMR |= BA0_DMR_SIZE8;
dma               735 sound/pci/cs4281.c 			dma->valDMR |= BA0_DMR_SWAPC;
dma               737 sound/pci/cs4281.c 	case 32: dma->valDMR |= BA0_DMR_SIZE20; break;
dma               739 sound/pci/cs4281.c 	dma->frag = 0;	/* for workaround */
dma               740 sound/pci/cs4281.c 	dma->valDCR = BA0_DCR_TCIE | BA0_DCR_MSK;
dma               742 sound/pci/cs4281.c 		dma->valDCR |= BA0_DCR_HTCIE;
dma               744 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regDBA, runtime->dma_addr);
dma               745 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regDBC, runtime->buffer_size - 1);
dma               746 sound/pci/cs4281.c 	rec_mono = (chip->dma[1].valDMR & BA0_DMR_MONO) == BA0_DMR_MONO;
dma               754 sound/pci/cs4281.c 		if (dma->left_slot == chip->src_left_play_slot) {
dma               756 sound/pci/cs4281.c 			snd_BUG_ON(dma->right_slot != chip->src_right_play_slot);
dma               760 sound/pci/cs4281.c 		if (dma->left_slot == chip->src_left_rec_slot) {
dma               762 sound/pci/cs4281.c 			snd_BUG_ON(dma->right_slot != chip->src_right_rec_slot);
dma               768 sound/pci/cs4281.c 	if (dma->regFCR == BA0_FCR0)
dma               769 sound/pci/cs4281.c 		snd_cs4281_pokeBA0(chip, dma->regFCR, snd_cs4281_peekBA0(chip, dma->regFCR) & ~BA0_FCR_FEN);
dma               771 sound/pci/cs4281.c 	dma->valFCR = BA0_FCR_LS(dma->left_slot) |
dma               772 sound/pci/cs4281.c 		      BA0_FCR_RS(capture && (dma->valDMR & BA0_DMR_MONO) ? 31 : dma->right_slot) |
dma               774 sound/pci/cs4281.c 		      BA0_FCR_OF(dma->fifo_offset);
dma               775 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regFCR, dma->valFCR | (capture ? BA0_FCR_PSH : 0));
dma               777 sound/pci/cs4281.c 	if (dma->regFCR == BA0_FCR0)
dma               778 sound/pci/cs4281.c 		snd_cs4281_pokeBA0(chip, dma->regFCR, dma->valFCR | BA0_FCR_FEN);
dma               780 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, dma->regFSIC, 0);
dma               797 sound/pci/cs4281.c 	struct cs4281_dma *dma = runtime->private_data;
dma               801 sound/pci/cs4281.c 	snd_cs4281_mode(chip, dma, runtime, 0, 1);
dma               809 sound/pci/cs4281.c 	struct cs4281_dma *dma = runtime->private_data;
dma               813 sound/pci/cs4281.c 	snd_cs4281_mode(chip, dma, runtime, 1, 1);
dma               821 sound/pci/cs4281.c 	struct cs4281_dma *dma = runtime->private_data;
dma               831 sound/pci/cs4281.c 	       snd_cs4281_peekBA0(chip, dma->regDCC) - 1;
dma               888 sound/pci/cs4281.c 	struct cs4281_dma *dma;
dma               890 sound/pci/cs4281.c 	dma = &chip->dma[0];
dma               891 sound/pci/cs4281.c 	dma->substream = substream;
dma               892 sound/pci/cs4281.c 	dma->left_slot = 0;
dma               893 sound/pci/cs4281.c 	dma->right_slot = 1;
dma               894 sound/pci/cs4281.c 	runtime->private_data = dma;
dma               907 sound/pci/cs4281.c 	struct cs4281_dma *dma;
dma               909 sound/pci/cs4281.c 	dma = &chip->dma[1];
dma               910 sound/pci/cs4281.c 	dma->substream = substream;
dma               911 sound/pci/cs4281.c 	dma->left_slot = 10;
dma               912 sound/pci/cs4281.c 	dma->right_slot = 11;
dma               913 sound/pci/cs4281.c 	runtime->private_data = dma;
dma               924 sound/pci/cs4281.c 	struct cs4281_dma *dma = substream->runtime->private_data;
dma               926 sound/pci/cs4281.c 	dma->substream = NULL;
dma               932 sound/pci/cs4281.c 	struct cs4281_dma *dma = substream->runtime->private_data;
dma               934 sound/pci/cs4281.c 	dma->substream = NULL;
dma              1572 sound/pci/cs4281.c 		struct cs4281_dma *dma = &chip->dma[tmp];
dma              1573 sound/pci/cs4281.c 		dma->regDBA = BA0_DBA0 + (tmp * 0x10);
dma              1574 sound/pci/cs4281.c 		dma->regDCA = BA0_DCA0 + (tmp * 0x10);
dma              1575 sound/pci/cs4281.c 		dma->regDBC = BA0_DBC0 + (tmp * 0x10);
dma              1576 sound/pci/cs4281.c 		dma->regDCC = BA0_DCC0 + (tmp * 0x10);
dma              1577 sound/pci/cs4281.c 		dma->regDMR = BA0_DMR0 + (tmp * 8);
dma              1578 sound/pci/cs4281.c 		dma->regDCR = BA0_DCR0 + (tmp * 8);
dma              1579 sound/pci/cs4281.c 		dma->regHDSR = BA0_HDSR0 + (tmp * 4);
dma              1580 sound/pci/cs4281.c 		dma->regFCR = BA0_FCR0 + (tmp * 4);
dma              1581 sound/pci/cs4281.c 		dma->regFSIC = BA0_FSIC0 + (tmp * 4);
dma              1582 sound/pci/cs4281.c 		dma->fifo_offset = tmp * CS4281_FIFO_SIZE;
dma              1583 sound/pci/cs4281.c 		snd_cs4281_pokeBA0(chip, dma->regFCR,
dma              1587 sound/pci/cs4281.c 				   BA0_FCR_OF(dma->fifo_offset));
dma              1596 sound/pci/cs4281.c 	chip->dma[0].valFCR = BA0_FCR_FEN | BA0_FCR_LS(0) |
dma              1599 sound/pci/cs4281.c 		              BA0_FCR_OF(chip->dma[0].fifo_offset);
dma              1600 sound/pci/cs4281.c 	snd_cs4281_pokeBA0(chip, chip->dma[0].regFCR, chip->dma[0].valFCR);
dma              1790 sound/pci/cs4281.c 	unsigned int status, dma, val;
dma              1802 sound/pci/cs4281.c 		for (dma = 0; dma < 4; dma++)
dma              1803 sound/pci/cs4281.c 			if (status & BA0_HISR_DMA(dma)) {
dma              1804 sound/pci/cs4281.c 				cdma = &chip->dma[dma];
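
The cs4281.c init loop above derives each DMA channel's register set from channel 0 by a fixed stride: the DBA/DCA/DBC/DCC block steps by 0x10 per channel, DMR/DCR by 8, and HDSR/FCR/FSIC by 4. A sketch of that addressing rule with the channel-0 bases passed in (the BA0_* values themselves are not reproduced here):

struct my_cs_dma_regs {
	unsigned int dba, dmr, fcr;
};

static void my_cs_fill_regs(struct my_cs_dma_regs *r, unsigned int ch,
			    unsigned int dba0, unsigned int dmr0,
			    unsigned int fcr0)
{
	r->dba = dba0 + ch * 0x10;	/* DBA/DCA/DBC/DCC: 0x10 apart */
	r->dmr = dmr0 + ch * 0x08;	/* DMR/DCR: 8 apart */
	r->fcr = fcr0 + ch * 0x04;	/* HDSR/FCR/FSIC: 4 apart */
}
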
dma               106 sound/pci/cs5535audio/cs5535audio_pcm.c 					 struct cs5535audio_dma *dma,
dma               118 sound/pci/cs5535audio/cs5535audio_pcm.c 	if (dma->desc_buf.area == NULL) {
dma               122 sound/pci/cs5535audio/cs5535audio_pcm.c 					&dma->desc_buf) < 0)
dma               124 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->period_bytes = dma->periods = 0;
dma               127 sound/pci/cs5535audio/cs5535audio_pcm.c 	if (dma->periods == periods && dma->period_bytes == period_bytes)
dma               133 sound/pci/cs5535audio/cs5535audio_pcm.c 	desc_addr = (u32) dma->desc_buf.addr;
dma               136 sound/pci/cs5535audio/cs5535audio_pcm.c 			&((struct cs5535audio_dma_desc *) dma->desc_buf.area)[i];
dma               144 sound/pci/cs5535audio/cs5535audio_pcm.c 	lastdesc = &((struct cs5535audio_dma_desc *) dma->desc_buf.area)[periods];
dma               145 sound/pci/cs5535audio/cs5535audio_pcm.c 	lastdesc->addr = cpu_to_le32((u32) dma->desc_buf.addr);
dma               148 sound/pci/cs5535audio/cs5535audio_pcm.c 	jmpprd_addr = (u32)dma->desc_buf.addr +
dma               151 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->substream = substream;
dma               152 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->period_bytes = period_bytes;
dma               153 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->periods = periods;
dma               155 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->ops->disable_dma(cs5535au);
dma               156 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->ops->setup_prd(cs5535au, jmpprd_addr);
dma               224 sound/pci/cs5535audio/cs5535audio_pcm.c 					  struct cs5535audio_dma *dma,
dma               227 sound/pci/cs5535audio/cs5535audio_pcm.c 	snd_dma_free_pages(&dma->desc_buf);
dma               228 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->desc_buf.area = NULL;
dma               229 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->substream = NULL;
dma               236 sound/pci/cs5535audio/cs5535audio_pcm.c 	struct cs5535audio_dma *dma = substream->runtime->private_data;
dma               243 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->buf_addr = substream->runtime->dma_addr;
dma               244 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma->buf_bytes = params_buffer_bytes(hw_params);
dma               246 sound/pci/cs5535audio/cs5535audio_pcm.c 	err = cs5535audio_build_dma_packets(cs5535au, dma, substream,
dma               250 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->pcm_open_flag = 1;
dma               258 sound/pci/cs5535audio/cs5535audio_pcm.c 	struct cs5535audio_dma *dma = substream->runtime->private_data;
dma               260 sound/pci/cs5535audio/cs5535audio_pcm.c 	if (dma->pcm_open_flag) {
dma               267 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->pcm_open_flag = 0;
dma               269 sound/pci/cs5535audio/cs5535audio_pcm.c 	cs5535audio_clear_dma_packets(cs5535au, dma, substream);
dma               283 sound/pci/cs5535audio/cs5535audio_pcm.c 	struct cs5535audio_dma *dma = substream->runtime->private_data;
dma               289 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->ops->pause_dma(cs5535au);
dma               292 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->ops->enable_dma(cs5535au);
dma               295 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->ops->enable_dma(cs5535au);
dma               298 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->ops->enable_dma(cs5535au);
dma               301 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->ops->disable_dma(cs5535au);
dma               304 sound/pci/cs5535audio/cs5535audio_pcm.c 		dma->ops->disable_dma(cs5535au);
dma               320 sound/pci/cs5535audio/cs5535audio_pcm.c 	struct cs5535audio_dma *dma;
dma               322 sound/pci/cs5535audio/cs5535audio_pcm.c 	dma = substream->runtime->private_data;
dma               323 sound/pci/cs5535audio/cs5535audio_pcm.c 	curdma = dma->ops->read_dma_pntr(cs5535au);
dma               324 sound/pci/cs5535audio/cs5535audio_pcm.c 	if (curdma < dma->buf_addr) {
dma               326 sound/pci/cs5535audio/cs5535audio_pcm.c 					curdma, dma->buf_addr);
dma               329 sound/pci/cs5535audio/cs5535audio_pcm.c 	curdma -= dma->buf_addr;
dma               330 sound/pci/cs5535audio/cs5535audio_pcm.c 	if (curdma >= dma->buf_bytes) {
dma               332 sound/pci/cs5535audio/cs5535audio_pcm.c 					curdma, dma->buf_bytes);
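
The cs5535audio pointer path above converts the engine's absolute bus address into an offset inside the PCM buffer, rejecting values outside the buffer before converting bytes to frames. A hedged sketch of that conversion; my_read_hw_pos stands in for the driver's register read.

#include <sound/pcm.h>

/* assumed hook: a real driver reads the current-position register here */
static dma_addr_t my_read_hw_pos(struct snd_pcm_substream *substream)
{
	return substream->runtime->dma_addr;
}

static snd_pcm_uframes_t my_pcm_pointer(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	dma_addr_t buf_addr = runtime->dma_addr;
	size_t buf_bytes = snd_pcm_lib_buffer_bytes(substream);
	dma_addr_t cur = my_read_hw_pos(substream);

	if (cur < buf_addr || cur - buf_addr >= buf_bytes)
		return 0;	/* out of range: report start of buffer */

	return bytes_to_frames(runtime, cur - buf_addr);
}
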
dma                53 sound/pci/cs5535audio/cs5535audio_pm.c 		struct cs5535audio_dma *dma = &cs5535au->dmas[i];
dma                54 sound/pci/cs5535audio/cs5535audio_pm.c 		if (dma && dma->substream)
dma                55 sound/pci/cs5535audio/cs5535audio_pm.c 			dma->saved_prd = dma->ops->read_prd(cs5535au);
dma                86 sound/pci/cs5535audio/cs5535audio_pm.c 		struct cs5535audio_dma *dma = &cs5535au->dmas[i];
dma                87 sound/pci/cs5535audio/cs5535audio_pm.c 		if (dma && dma->substream) {
dma                88 sound/pci/cs5535audio/cs5535audio_pm.c 			dma->substream->ops->prepare(dma->substream);
dma                89 sound/pci/cs5535audio/cs5535audio_pm.c 			dma->ops->setup_prd(cs5535au, dma->saved_prd);
dma               499 sound/pci/es1968.c 	struct snd_dma_buffer dma;
dma              1011 sound/pci/es1968.c 		pa -= chip->dma.addr;
dma              1104 sound/pci/es1968.c 	pa -= chip->dma.addr;
dma              1409 sound/pci/es1968.c 	if (! chip->dma.area)
dma              1411 sound/pci/es1968.c 	snd_dma_free_pages(&chip->dma);
dma              1425 sound/pci/es1968.c 	chip->dma.dev.type = SNDRV_DMA_TYPE_DEV;
dma              1426 sound/pci/es1968.c 	chip->dma.dev.dev = snd_dma_pci_data(chip->pci);
dma              1429 sound/pci/es1968.c 					   chip->total_bufsize, &chip->dma);
dma              1430 sound/pci/es1968.c 	if (err < 0 || ! chip->dma.area) {
dma              1436 sound/pci/es1968.c 	if ((chip->dma.addr + chip->dma.bytes - 1) & ~((1 << 28) - 1)) {
dma              1437 sound/pci/es1968.c 		snd_dma_free_pages(&chip->dma);
dma              1449 sound/pci/es1968.c 	memset(chip->dma.area, 0, ESM_MEM_ALIGN);
dma              1450 sound/pci/es1968.c 	chunk->buf = chip->dma;
dma              1722 sound/pci/es1968.c 	pa = (unsigned int)((memory->buf.addr - chip->dma.addr) >> 1);
dma              1806 sound/pci/es1968.c 	wave_set_register(chip, 0x01FC, chip->dma.addr >> 12);
dma              1807 sound/pci/es1968.c 	wave_set_register(chip, 0x01FD, chip->dma.addr >> 12);
dma              1808 sound/pci/es1968.c 	wave_set_register(chip, 0x01FE, chip->dma.addr >> 12);
dma              1809 sound/pci/es1968.c 	wave_set_register(chip, 0x01FF, chip->dma.addr >> 12);
dma              2398 sound/pci/es1968.c 	if (chip->dma.addr) {
dma              2400 sound/pci/es1968.c 		wave_set_register(chip, 0x01FC, chip->dma.addr >> 12);
dma              2626 sound/pci/hda/patch_ca0132.c static int dma_reset(struct dma_engine *dma)
dma              2628 sound/pci/hda/patch_ca0132.c 	struct hda_codec *codec = dma->codec;
dma              2632 sound/pci/hda/patch_ca0132.c 	if (dma->dmab->area)
dma              2633 sound/pci/hda/patch_ca0132.c 		snd_hda_codec_load_dsp_cleanup(codec, dma->dmab);
dma              2636 sound/pci/hda/patch_ca0132.c 			dma->m_converter_format,
dma              2637 sound/pci/hda/patch_ca0132.c 			dma->buf_size,
dma              2638 sound/pci/hda/patch_ca0132.c 			dma->dmab);
dma              2645 sound/pci/hda/patch_ca0132.c static int dma_set_state(struct dma_engine *dma, enum dma_state state)
dma              2660 sound/pci/hda/patch_ca0132.c 	snd_hda_codec_load_dsp_trigger(dma->codec, cmd);
dma              2664 sound/pci/hda/patch_ca0132.c static unsigned int dma_get_buffer_size(struct dma_engine *dma)
dma              2666 sound/pci/hda/patch_ca0132.c 	return dma->dmab->bytes;
dma              2669 sound/pci/hda/patch_ca0132.c static unsigned char *dma_get_buffer_addr(struct dma_engine *dma)
dma              2671 sound/pci/hda/patch_ca0132.c 	return dma->dmab->area;
dma              2674 sound/pci/hda/patch_ca0132.c static int dma_xfer(struct dma_engine *dma,
dma              2678 sound/pci/hda/patch_ca0132.c 	memcpy(dma->dmab->area, data, count);
dma              2683 sound/pci/hda/patch_ca0132.c 		struct dma_engine *dma,
dma              2687 sound/pci/hda/patch_ca0132.c 		*format = dma->m_converter_format;
dma              2690 sound/pci/hda/patch_ca0132.c static unsigned int dma_get_stream_id(struct dma_engine *dma)
dma              2692 sound/pci/hda/patch_ca0132.c 	struct ca0132_spec *spec = dma->codec->spec;
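
The patch_ca0132 entries above wrap the HDA DSP-loader buffer: once the codec DSP-loader API has set up the snd_dma_buffer, a transfer is simply a memcpy() into its area. A minimal sketch of that xfer step with a bounds check; my_dsp_dma_xfer is an illustrative name, not the driver's.

#include <linux/string.h>
#include <linux/errno.h>
#include <sound/memalloc.h>

static int my_dsp_dma_xfer(struct snd_dma_buffer *dmab,
			   const void *data, unsigned int count)
{
	if (count > dmab->bytes)
		return -EINVAL;		/* would overrun the DMA buffer */
	memcpy(dmab->area, data, count);
	return 0;
}
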
dma               151 sound/ppc/pmac.c 	while ((in_le32(&rec->dma->status) & RUN) && timeout-- > 0)
dma               173 sound/ppc/pmac.c 	out_le32(&rec->dma->control, (RUN|WAKE|FLUSH|PAUSE) << 16);
dma               182 sound/ppc/pmac.c 	out_le32(&rec->dma->cmdptr, cmd->addr);
dma               190 sound/ppc/pmac.c 	out_le32(&rec->dma->control, status | (status << 16));
dma               280 sound/ppc/pmac.c 		(void)in_le32(&rec->dma->status);
dma               409 sound/ppc/pmac.c 	(void)in_le32(&rec->dma->status);
dma               410 sound/ppc/pmac.c 	out_le32(&rec->dma->control, (RUN|PAUSE|FLUSH|WAKE) << 16);
dma               438 sound/ppc/pmac.c 	out_le32(&rec->dma->cmdptr, emergency_dbdma.addr);
dma               441 sound/ppc/pmac.c 	(void)in_le32(&rec->dma->status);
dma               443 sound/ppc/pmac.c 	out_le32(&rec->dma->control, ((RUN|WAKE) << 16) + (RUN|WAKE));
dma               734 sound/ppc/pmac.c 	out_le32(&chip->playback.dma->control, (RUN|PAUSE|FLUSH|WAKE|DEAD) << 16);
dma               736 sound/ppc/pmac.c 	out_le32(&chip->capture.dma->control, (RUN|PAUSE|FLUSH|WAKE|DEAD) << 16);
dma               859 sound/ppc/pmac.c 	iounmap(chip->playback.dma);
dma               860 sound/ppc/pmac.c 	iounmap(chip->capture.dma);
dma              1253 sound/ppc/pmac.c 	chip->playback.dma = ioremap(txdma_addr, 0x100);
dma              1254 sound/ppc/pmac.c 	chip->capture.dma = ioremap(rxdma_addr, 0x100);
dma                61 sound/ppc/pmac.h 	volatile struct dbdma_regs __iomem *dma;
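
The sound/ppc/pmac.c entries above program the Apple DBDMA control register, whose upper 16 bits select which status bits to change and whose lower 16 bits carry the new values: writing (bits << 16) alone clears those bits, while (bits << 16) | bits sets them. Two helpers, assuming powerpc and the dbdma_regs layout from <asm/dbdma.h>, make that convention explicit:

#include <asm/dbdma.h>
#include <asm/io.h>

static void my_dbdma_set_bits(volatile struct dbdma_regs __iomem *dma, u32 bits)
{
	/* mask in the high half, matching value in the low half: set */
	out_le32(&dma->control, (bits << 16) | bits);
}

static void my_dbdma_clear_bits(volatile struct dbdma_regs __iomem *dma, u32 bits)
{
	/* mask with a zero value field: clear */
	out_le32(&dma->control, bits << 16);
}
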
dma                35 sound/soc/au1x/dma.c 	int dma;
dma               108 sound/soc/au1x/dma.c 		disable_dma(stream->dma);
dma               116 sound/soc/au1x/dma.c 	init_dma(stream->dma);
dma               117 sound/soc/au1x/dma.c 	if (get_dma_active_buffer(stream->dma) == 0) {
dma               118 sound/soc/au1x/dma.c 		clear_dma_done0(stream->dma);
dma               119 sound/soc/au1x/dma.c 		set_dma_addr0(stream->dma, stream->buffer->start);
dma               120 sound/soc/au1x/dma.c 		set_dma_count0(stream->dma, stream->period_size >> 1);
dma               121 sound/soc/au1x/dma.c 		set_dma_addr1(stream->dma, stream->buffer->next->start);
dma               122 sound/soc/au1x/dma.c 		set_dma_count1(stream->dma, stream->period_size >> 1);
dma               124 sound/soc/au1x/dma.c 		clear_dma_done1(stream->dma);
dma               125 sound/soc/au1x/dma.c 		set_dma_addr1(stream->dma, stream->buffer->start);
dma               126 sound/soc/au1x/dma.c 		set_dma_count1(stream->dma, stream->period_size >> 1);
dma               127 sound/soc/au1x/dma.c 		set_dma_addr0(stream->dma, stream->buffer->next->start);
dma               128 sound/soc/au1x/dma.c 		set_dma_count0(stream->dma, stream->period_size >> 1);
dma               130 sound/soc/au1x/dma.c 	enable_dma_buffers(stream->dma);
dma               131 sound/soc/au1x/dma.c 	start_dma(stream->dma);
dma               139 sound/soc/au1x/dma.c 	switch (get_dma_buffer_done(stream->dma)) {
dma               142 sound/soc/au1x/dma.c 		clear_dma_done0(stream->dma);
dma               143 sound/soc/au1x/dma.c 		set_dma_addr0(stream->dma, stream->buffer->next->start);
dma               144 sound/soc/au1x/dma.c 		set_dma_count0(stream->dma, stream->period_size >> 1);
dma               145 sound/soc/au1x/dma.c 		enable_dma_buffer0(stream->dma);
dma               149 sound/soc/au1x/dma.c 		clear_dma_done1(stream->dma);
dma               150 sound/soc/au1x/dma.c 		set_dma_addr1(stream->dma, stream->buffer->next->start);
dma               151 sound/soc/au1x/dma.c 		set_dma_count1(stream->dma, stream->period_size >> 1);
dma               152 sound/soc/au1x/dma.c 		enable_dma_buffer1(stream->dma);
dma               155 sound/soc/au1x/dma.c 		pr_debug("DMA %d missed interrupt.\n", stream->dma);
dma               160 sound/soc/au1x/dma.c 		pr_debug("DMA %d empty irq.\n", stream->dma);
dma               203 sound/soc/au1x/dma.c 	ctx->stream[s].dma = request_au1000_dma(dmaids[s], name,
dma               206 sound/soc/au1x/dma.c 	set_dma_mode(ctx->stream[s].dma,
dma               207 sound/soc/au1x/dma.c 		     get_dma_mode(ctx->stream[s].dma) & ~DMA_NC);
dma               222 sound/soc/au1x/dma.c 	free_au1000_dma(ctx->stream[stype].dma);
dma               277 sound/soc/au1x/dma.c 	location = get_dma_residue(stream->dma);
dma               389 sound/soc/fsl/fsl_dma.c 	struct dma_object *dma =
dma               414 sound/soc/fsl/fsl_dma.c 	if (dma->assigned) {
dma               426 sound/soc/fsl/fsl_dma.c 		dma_private->ssi_sxx_phys = dma->ssi_stx_phys;
dma               428 sound/soc/fsl/fsl_dma.c 		dma_private->ssi_sxx_phys = dma->ssi_srx_phys;
dma               430 sound/soc/fsl/fsl_dma.c 	dma_private->ssi_fifo_depth = dma->ssi_fifo_depth;
dma               431 sound/soc/fsl/fsl_dma.c 	dma_private->dma_channel = dma->channel;
dma               432 sound/soc/fsl/fsl_dma.c 	dma_private->irq = dma->irq;
dma               447 sound/soc/fsl/fsl_dma.c 	dma->assigned = true;
dma               806 sound/soc/fsl/fsl_dma.c 	struct dma_object *dma =
dma               819 sound/soc/fsl/fsl_dma.c 	dma->assigned = false;
dma               886 sound/soc/fsl/fsl_dma.c 	struct dma_object *dma;
dma               908 sound/soc/fsl/fsl_dma.c 	dma = kzalloc(sizeof(*dma), GFP_KERNEL);
dma               909 sound/soc/fsl/fsl_dma.c 	if (!dma) {
dma               914 sound/soc/fsl/fsl_dma.c 	dma->dai.name = DRV_NAME;
dma               915 sound/soc/fsl/fsl_dma.c 	dma->dai.ops = &fsl_dma_ops;
dma               916 sound/soc/fsl/fsl_dma.c 	dma->dai.pcm_new = fsl_dma_new;
dma               917 sound/soc/fsl/fsl_dma.c 	dma->dai.pcm_free = fsl_dma_free_dma_buffers;
dma               920 sound/soc/fsl/fsl_dma.c 	dma->ssi_stx_phys = res.start + REG_SSI_STX0;
dma               921 sound/soc/fsl/fsl_dma.c 	dma->ssi_srx_phys = res.start + REG_SSI_SRX0;
dma               925 sound/soc/fsl/fsl_dma.c 		dma->ssi_fifo_depth = be32_to_cpup(iprop);
dma               928 sound/soc/fsl/fsl_dma.c 		dma->ssi_fifo_depth = 8;
dma               932 sound/soc/fsl/fsl_dma.c 	ret = devm_snd_soc_register_component(&pdev->dev, &dma->dai, NULL, 0);
dma               935 sound/soc/fsl/fsl_dma.c 		kfree(dma);
dma               939 sound/soc/fsl/fsl_dma.c 	dma->channel = of_iomap(np, 0);
dma               940 sound/soc/fsl/fsl_dma.c 	dma->irq = irq_of_parse_and_map(np, 0);
dma               942 sound/soc/fsl/fsl_dma.c 	dev_set_drvdata(&pdev->dev, dma);
dma               949 sound/soc/fsl/fsl_dma.c 	struct dma_object *dma = dev_get_drvdata(&pdev->dev);
dma               951 sound/soc/fsl/fsl_dma.c 	iounmap(dma->channel);
dma               952 sound/soc/fsl/fsl_dma.c 	irq_dispose_mapping(dma->irq);
dma               953 sound/soc/fsl/fsl_dma.c 	kfree(dma);
dma                26 sound/soc/fsl/imx-pcm.h 	int dma, enum sdma_peripheral_type peripheral_type)
dma                28 sound/soc/fsl/imx-pcm.h 	dma_data->dma_request = dma;
dma               301 sound/soc/intel/common/sst-dsp-priv.h 	struct sst_dma *dma;
dma                75 sound/soc/intel/common/sst-firmware.c 	struct sst_dma *dma = sst->dma;
dma                77 sound/soc/intel/common/sst-firmware.c 	if (dma->ch == NULL) {
dma                85 sound/soc/intel/common/sst-firmware.c 	desc = dma->ch->device->device_prep_dma_memcpy(dma->ch, dest_addr,
dma               219 sound/soc/intel/common/sst-firmware.c 	struct sst_dma *dma = dsp->dma;
dma               228 sound/soc/intel/common/sst-firmware.c 	dma->ch = dma_request_channel(mask, dma_chan_filter, dsp);
dma               229 sound/soc/intel/common/sst-firmware.c 	if (dma->ch == NULL) {
dma               240 sound/soc/intel/common/sst-firmware.c 	ret = dmaengine_slave_config(dma->ch, &slave);
dma               244 sound/soc/intel/common/sst-firmware.c 		dma_release_channel(dma->ch);
dma               245 sound/soc/intel/common/sst-firmware.c 		dma->ch = NULL;
dma               254 sound/soc/intel/common/sst-firmware.c 	struct sst_dma *dma = dsp->dma;
dma               256 sound/soc/intel/common/sst-firmware.c 	if (!dma->ch)
dma               259 sound/soc/intel/common/sst-firmware.c 	dma_release_channel(dma->ch);
dma               260 sound/soc/intel/common/sst-firmware.c 	dma->ch = NULL;
dma               267 sound/soc/intel/common/sst-firmware.c 	struct sst_dma *dma;
dma               286 sound/soc/intel/common/sst-firmware.c 	dma = devm_kzalloc(sst->dev, sizeof(struct sst_dma), GFP_KERNEL);
dma               287 sound/soc/intel/common/sst-firmware.c 	if (!dma)
dma               290 sound/soc/intel/common/sst-firmware.c 	dma->sst = sst;
dma               299 sound/soc/intel/common/sst-firmware.c 	dma->chip = dw_probe(sst->dma_dev, &mem, sst_pdata->irq);
dma               300 sound/soc/intel/common/sst-firmware.c 	if (IS_ERR(dma->chip)) {
dma               302 sound/soc/intel/common/sst-firmware.c 		ret = PTR_ERR(dma->chip);
dma               306 sound/soc/intel/common/sst-firmware.c 	sst->dma = dma;
dma               311 sound/soc/intel/common/sst-firmware.c 	devm_kfree(sst->dev, dma);
dma               315 sound/soc/intel/common/sst-firmware.c static void sst_dma_free(struct sst_dma *dma)
dma               318 sound/soc/intel/common/sst-firmware.c 	if (dma == NULL)
dma               321 sound/soc/intel/common/sst-firmware.c 	if (dma->ch)
dma               322 sound/soc/intel/common/sst-firmware.c 		dma_release_channel(dma->ch);
dma               324 sound/soc/intel/common/sst-firmware.c 	if (dma->chip)
dma               325 sound/soc/intel/common/sst-firmware.c 		dw_remove(dma->chip);
dma              1268 sound/soc/intel/common/sst-firmware.c 	sst_dma_free(sst->dma);
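
The sst-firmware.c entries above drive firmware downloads over a generic dmaengine channel: request a memcpy-capable channel, apply a slave config, prepare a memcpy descriptor on it and let the engine run. A trimmed sketch of that sequence using the standard dmaengine wrappers (the entries call device_prep_dma_memcpy directly; the wrapper below is the equivalent helper):

#include <linux/dmaengine.h>
#include <linux/errno.h>

static struct dma_chan *my_get_memcpy_chan(void)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);	/* any memcpy-capable channel */
	return dma_request_channel(mask, NULL, NULL);
}

static int my_dma_memcpy(struct dma_chan *chan, dma_addr_t dst,
			 dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_dma_memcpy(chan, dst, src, len,
					 DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);	/* kick the transfer */
	return 0;
}
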
dma                78 sound/soc/kirkwood/kirkwood-dma.c 			       unsigned long dma,
dma                90 sound/soc/kirkwood/kirkwood-dma.c 		if ((cs->base & 0xffff0000) < (dma & 0xffff0000)) {
dma                81 sound/soc/pxa/pxa-ssp.c 			int out, struct snd_dmaengine_dai_dma_data *dma)
dma                83 sound/soc/pxa/pxa-ssp.c 	dma->addr_width = width4 ? DMA_SLAVE_BUSWIDTH_4_BYTES :
dma                85 sound/soc/pxa/pxa-ssp.c 	dma->maxburst = 16;
dma                86 sound/soc/pxa/pxa-ssp.c 	dma->addr = ssp->phys_base + SSDR;
dma                94 sound/soc/pxa/pxa-ssp.c 	struct snd_dmaengine_dai_dma_data *dma;
dma               105 sound/soc/pxa/pxa-ssp.c 	dma = kzalloc(sizeof(struct snd_dmaengine_dai_dma_data), GFP_KERNEL);
dma               106 sound/soc/pxa/pxa-ssp.c 	if (!dma)
dma               108 sound/soc/pxa/pxa-ssp.c 	dma->chan_name = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ?
dma               111 sound/soc/pxa/pxa-ssp.c 	snd_soc_dai_set_dma_data(cpu_dai, substream, dma);
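
The pxa-ssp entries above hand per-stream DMA parameters to the generic dmaengine PCM layer through snd_dmaengine_dai_dma_data: the FIFO bus address, the register width and a burst size. A small sketch of filling that structure; fifo_addr and the width choice are placeholders:

#include <sound/dmaengine_pcm.h>

static void my_fill_dma_data(struct snd_dmaengine_dai_dma_data *dma,
			     dma_addr_t fifo_addr, bool wide)
{
	dma->addr = fifo_addr;			/* device FIFO bus address */
	dma->addr_width = wide ? DMA_SLAVE_BUSWIDTH_4_BYTES
			       : DMA_SLAVE_BUSWIDTH_2_BYTES;
	dma->maxburst = 16;			/* burst length in words */
}
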
dma               631 sound/soc/sh/fsi.c 		u32 dma = 0;
dma               639 sound/soc/sh/fsi.c 			dma |= VDMD_FRONT;
dma               644 sound/soc/sh/fsi.c 			dma |= VDMD_STREAM;
dma               650 sound/soc/sh/fsi.c 			dma |= VDMD_BACK;
dma               656 sound/soc/sh/fsi.c 			fsi_reg_write(fsi, OUT_DMAC,	dma);
dma               658 sound/soc/sh/fsi.c 			fsi_reg_write(fsi, IN_DMAC,	dma);
dma                43 sound/soc/sh/rcar/dma.c 	} dma;
dma                52 sound/soc/sh/rcar/dma.c #define rsnd_priv_to_dmac(p)	((struct rsnd_dma_ctrl *)(p)->dma)
dma                54 sound/soc/sh/rcar/dma.c #define rsnd_dma_to_dmaen(dma)	(&(dma)->dma.en)
dma                55 sound/soc/sh/rcar/dma.c #define rsnd_dma_to_dmapp(dma)	(&(dma)->dma.pp)
dma               100 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               101 sound/soc/sh/rcar/dma.c 	struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma);
dma               113 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               114 sound/soc/sh/rcar/dma.c 	struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma);
dma               133 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               134 sound/soc/sh/rcar/dma.c 	struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma);
dma               147 sound/soc/sh/rcar/dma.c 						 dma->mod_from,
dma               148 sound/soc/sh/rcar/dma.c 						 dma->mod_to);
dma               162 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               163 sound/soc/sh/rcar/dma.c 	struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma);
dma               172 sound/soc/sh/rcar/dma.c 	cfg.src_addr	= dma->src_addr;
dma               173 sound/soc/sh/rcar/dma.c 	cfg.dst_addr	= dma->dst_addr;
dma               198 sound/soc/sh/rcar/dma.c 	desc->callback_param	= rsnd_mod_get(dma);
dma               233 sound/soc/sh/rcar/dma.c 			   struct rsnd_dma *dma,
dma               275 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               276 sound/soc/sh/rcar/dma.c 	struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma);
dma               391 sound/soc/sh/rcar/dma.c #define rsnd_dmapp_addr(dmac, dma, reg) \
dma               393 sound/soc/sh/rcar/dma.c 	 (0x10 * rsnd_dma_to_dmapp(dma)->dmapp_id))
dma               394 sound/soc/sh/rcar/dma.c static void rsnd_dmapp_write(struct rsnd_dma *dma, u32 data, u32 reg)
dma               396 sound/soc/sh/rcar/dma.c 	struct rsnd_mod *mod = rsnd_mod_get(dma);
dma               401 sound/soc/sh/rcar/dma.c 	dev_dbg(dev, "w 0x%px : %08x\n", rsnd_dmapp_addr(dmac, dma, reg), data);
dma               403 sound/soc/sh/rcar/dma.c 	iowrite32(data, rsnd_dmapp_addr(dmac, dma, reg));
dma               406 sound/soc/sh/rcar/dma.c static u32 rsnd_dmapp_read(struct rsnd_dma *dma, u32 reg)
dma               408 sound/soc/sh/rcar/dma.c 	struct rsnd_mod *mod = rsnd_mod_get(dma);
dma               412 sound/soc/sh/rcar/dma.c 	return ioread32(rsnd_dmapp_addr(dmac, dma, reg));
dma               415 sound/soc/sh/rcar/dma.c static void rsnd_dmapp_bset(struct rsnd_dma *dma, u32 data, u32 mask, u32 reg)
dma               417 sound/soc/sh/rcar/dma.c 	struct rsnd_mod *mod = rsnd_mod_get(dma);
dma               420 sound/soc/sh/rcar/dma.c 	void __iomem *addr = rsnd_dmapp_addr(dmac, dma, reg);
dma               433 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               436 sound/soc/sh/rcar/dma.c 	rsnd_dmapp_bset(dma, 0,  PDMACHCR_DE, PDMACHCR);
dma               439 sound/soc/sh/rcar/dma.c 		if (0 == (rsnd_dmapp_read(dma, PDMACHCR) & PDMACHCR_DE))
dma               451 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma = rsnd_mod_to_dma(mod);
dma               452 sound/soc/sh/rcar/dma.c 	struct rsnd_dmapp *dmapp = rsnd_dma_to_dmapp(dma);
dma               454 sound/soc/sh/rcar/dma.c 	rsnd_dmapp_write(dma, dma->src_addr,	PDMASAR);
dma               455 sound/soc/sh/rcar/dma.c 	rsnd_dmapp_write(dma, dma->dst_addr,	PDMADAR);
dma               456 sound/soc/sh/rcar/dma.c 	rsnd_dmapp_write(dma, dmapp->chcr,	PDMACHCR);
dma               462 sound/soc/sh/rcar/dma.c 			     struct rsnd_dma *dma,
dma               465 sound/soc/sh/rcar/dma.c 	struct rsnd_dmapp *dmapp = rsnd_dma_to_dmapp(dma);
dma               476 sound/soc/sh/rcar/dma.c 		dmapp->dmapp_id, &dma->src_addr, &dma->dst_addr, dmapp->chcr);
dma               736 sound/soc/sh/rcar/dma.c 	struct rsnd_dma *dma;
dma               739 sound/soc/sh/rcar/dma.c 	int (*attach)(struct rsnd_dai_stream *io, struct rsnd_dma *dma,
dma               776 sound/soc/sh/rcar/dma.c 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
dma               777 sound/soc/sh/rcar/dma.c 	if (!dma)
dma               780 sound/soc/sh/rcar/dma.c 	*dma_mod = rsnd_mod_get(dma);
dma               792 sound/soc/sh/rcar/dma.c 	ret = attach(io, dma, mod_from, mod_to);
dma               796 sound/soc/sh/rcar/dma.c 	dma->src_addr = rsnd_dma_addr(io, mod_from, is_play, 1);
dma               797 sound/soc/sh/rcar/dma.c 	dma->dst_addr = rsnd_dma_addr(io, mod_to,   is_play, 0);
dma               798 sound/soc/sh/rcar/dma.c 	dma->mod_from = mod_from;
dma               799 sound/soc/sh/rcar/dma.c 	dma->mod_to   = mod_to;
dma               845 sound/soc/sh/rcar/dma.c 	priv->dma = dmac;
dma               493 sound/soc/sh/rcar/rsnd.h 	struct rsnd_mod *dma;
dma               633 sound/soc/sh/rcar/rsnd.h 	void *dma;
dma                25 sound/soc/sh/rcar/src.c 	struct rsnd_mod *dma;
dma               544 sound/soc/sh/rcar/src.c 	ret = rsnd_dma_attach(io, mod, &src->dma);
dma               959 sound/soc/sh/rcar/ssi.c 	ret = rsnd_dma_attach(io, mod, &io->dma);
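The rcar entries above fill a struct dma_slave_config with FIFO addresses before handing the channel to the dmaengine core, and later attach the result through rsnd_dma_attach(). A minimal sketch of that configuration step follows; it is not taken from rcar/dma.c, and the function name, FIFO address and bus widths are hypothetical.

    #include <linux/types.h>
    #include <linux/dmaengine.h>

    /*
     * Illustrative only: program the peripheral-side parameters of a
     * dmaengine slave channel, the way an audio driver does for its
     * playback/capture FIFO.  Widths and the 4-byte sample size are
     * assumptions for the sketch.
     */
    static int example_audio_dma_config(struct dma_chan *chan,
    				    dma_addr_t fifo_addr, bool is_play)
    {
    	struct dma_slave_config cfg = { 0 };

    	if (is_play) {
    		cfg.direction	   = DMA_MEM_TO_DEV;
    		cfg.dst_addr	   = fifo_addr;	/* peripheral FIFO */
    		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
    	} else {
    		cfg.direction	   = DMA_DEV_TO_MEM;
    		cfg.src_addr	   = fifo_addr;
    		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
    	}

    	/* hand the peripheral-side setup to the DMA controller driver */
    	return dmaengine_slave_config(chan, &cfg);
    }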
dma                72 sound/soc/sprd/sprd-pcm-compress.c 	struct sprd_compr_dma dma[SPRD_COMPR_DMA_CHANS];
dma               118 sound/soc/sprd/sprd-pcm-compress.c 	struct sprd_compr_dma *dma = &stream->dma[1];
dma               121 sound/soc/sprd/sprd-pcm-compress.c 	stream->copied_total += dma->trans_len;
dma               140 sound/soc/sprd/sprd-pcm-compress.c 	struct sprd_compr_dma *dma = &stream->dma[channel];
dma               156 sound/soc/sprd/sprd-pcm-compress.c 	dma->chan = dma_request_slave_channel(dev,
dma               158 sound/soc/sprd/sprd-pcm-compress.c 	if (!dma->chan) {
dma               199 sound/soc/sprd/sprd-pcm-compress.c 	dma->trans_len = period * period_cnt;
dma               216 sound/soc/sprd/sprd-pcm-compress.c 		sg_dma_len(sgt) = dma->trans_len;
dma               224 sound/soc/sprd/sprd-pcm-compress.c 	link.virt_addr = (unsigned long)dma->virt;
dma               225 sound/soc/sprd/sprd-pcm-compress.c 	link.phy_addr = dma->phys;
dma               227 sound/soc/sprd/sprd-pcm-compress.c 	ret = dmaengine_slave_config(dma->chan, &config);
dma               238 sound/soc/sprd/sprd-pcm-compress.c 	dma->desc = dma->chan->device->device_prep_slave_sg(dma->chan, sg,
dma               241 sound/soc/sprd/sprd-pcm-compress.c 	if (!dma->desc) {
dma               249 sound/soc/sprd/sprd-pcm-compress.c 		dma->desc->callback = sprd_platform_compr_dma_complete;
dma               250 sound/soc/sprd/sprd-pcm-compress.c 		dma->desc->callback_param = cstream;
dma               260 sound/soc/sprd/sprd-pcm-compress.c 	dma_release_channel(dma->chan);
dma               311 sound/soc/sprd/sprd-pcm-compress.c 	dma_release_channel(stream->dma[0].chan);
dma               313 sound/soc/sprd/sprd-pcm-compress.c 	dma_release_channel(stream->dma[1].chan);
dma               351 sound/soc/sprd/sprd-pcm-compress.c 	stream->dma[0].virt = stream->iram_buffer.area + SPRD_COMPR_IRAM_SIZE;
dma               352 sound/soc/sprd/sprd-pcm-compress.c 	stream->dma[0].phys = stream->iram_buffer.addr + SPRD_COMPR_IRAM_SIZE;
dma               371 sound/soc/sprd/sprd-pcm-compress.c 	stream->dma[1].virt = stream->compr_buffer.area + SPRD_COMPR_AREA_SIZE;
dma               372 sound/soc/sprd/sprd-pcm-compress.c 	stream->dma[1].phys = stream->compr_buffer.addr + SPRD_COMPR_AREA_SIZE;
dma               406 sound/soc/sprd/sprd-pcm-compress.c 		struct sprd_compr_dma *dma = &stream->dma[i];
dma               408 sound/soc/sprd/sprd-pcm-compress.c 		if (dma->chan) {
dma               409 sound/soc/sprd/sprd-pcm-compress.c 			dma_release_channel(dma->chan);
dma               410 sound/soc/sprd/sprd-pcm-compress.c 			dma->chan = NULL;
dma               443 sound/soc/sprd/sprd-pcm-compress.c 			struct sprd_compr_dma *dma = &stream->dma[i];
dma               445 sound/soc/sprd/sprd-pcm-compress.c 			if (!dma->desc)
dma               448 sound/soc/sprd/sprd-pcm-compress.c 			dma->cookie = dmaengine_submit(dma->desc);
dma               449 sound/soc/sprd/sprd-pcm-compress.c 			ret = dma_submit_error(dma->cookie);
dma               458 sound/soc/sprd/sprd-pcm-compress.c 			struct sprd_compr_dma *dma = &stream->dma[i];
dma               460 sound/soc/sprd/sprd-pcm-compress.c 			if (dma->chan)
dma               461 sound/soc/sprd/sprd-pcm-compress.c 				dma_async_issue_pending(dma->chan);
dma               469 sound/soc/sprd/sprd-pcm-compress.c 			struct sprd_compr_dma *dma = &stream->dma[i];
dma               471 sound/soc/sprd/sprd-pcm-compress.c 			if (dma->chan)
dma               472 sound/soc/sprd/sprd-pcm-compress.c 				dmaengine_terminate_async(dma->chan);
dma               487 sound/soc/sprd/sprd-pcm-compress.c 			struct sprd_compr_dma *dma = &stream->dma[i];
dma               489 sound/soc/sprd/sprd-pcm-compress.c 			if (dma->chan)
dma               490 sound/soc/sprd/sprd-pcm-compress.c 				dmaengine_pause(dma->chan);
dma               499 sound/soc/sprd/sprd-pcm-compress.c 			struct sprd_compr_dma *dma = &stream->dma[i];
dma               501 sound/soc/sprd/sprd-pcm-compress.c 			if (dma->chan)
dma               502 sound/soc/sprd/sprd-pcm-compress.c 				dmaengine_resume(dma->chan);
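The sprd-pcm-compress.c entries above follow the standard dmaengine slave flow: request a channel, apply a slave config, prepare a scatter-gather descriptor, attach a completion callback, submit it, then issue pending work (with the matching dmaengine_* helpers used for terminate/pause/resume). A condensed sketch of that flow is below; it uses dma_request_chan() rather than the older dma_request_slave_channel() seen above, and the channel name, addresses and lengths are hypothetical.

    #include <linux/device.h>
    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>
    #include <linux/err.h>

    static void example_dma_done(void *param)
    {
    	/* runs from the DMA driver's completion context */
    }

    static int example_start_slave_sg(struct device *dev, dma_addr_t buf,
    				  size_t len, dma_addr_t fifo)
    {
    	struct dma_async_tx_descriptor *desc;
    	struct dma_slave_config cfg = { 0 };
    	struct dma_chan *chan;
    	struct scatterlist sg;
    	dma_cookie_t cookie;
    	int ret;

    	chan = dma_request_chan(dev, "tx");	/* name from DT "dma-names" */
    	if (IS_ERR(chan))
    		return PTR_ERR(chan);

    	cfg.direction	   = DMA_MEM_TO_DEV;
    	cfg.dst_addr	   = fifo;
    	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
    	ret = dmaengine_slave_config(chan, &cfg);
    	if (ret)
    		goto err;

    	sg_init_table(&sg, 1);
    	sg_dma_address(&sg) = buf;	/* buffer is already DMA-mapped */
    	sg_dma_len(&sg) = len;

    	desc = dmaengine_prep_slave_sg(chan, &sg, 1, DMA_MEM_TO_DEV,
    				       DMA_PREP_INTERRUPT);
    	if (!desc) {
    		ret = -ENOMEM;
    		goto err;
    	}
    	desc->callback = example_dma_done;

    	cookie = dmaengine_submit(desc);
    	ret = dma_submit_error(cookie);
    	if (ret)
    		goto err;

    	dma_async_issue_pending(chan);	/* actually start the hardware */
    	return 0;
    err:
    	dma_release_channel(chan);
    	return ret;
    }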
dma               652 sound/soc/ti/davinci-i2s.c 	int *dma;
dma               683 sound/soc/ti/davinci-i2s.c 		dma = &dev->dma_request[SNDRV_PCM_STREAM_PLAYBACK];
dma               684 sound/soc/ti/davinci-i2s.c 		*dma = res->start;
dma               685 sound/soc/ti/davinci-i2s.c 		dma_data->filter_data = dma;
dma               698 sound/soc/ti/davinci-i2s.c 		dma = &dev->dma_request[SNDRV_PCM_STREAM_CAPTURE];
dma               699 sound/soc/ti/davinci-i2s.c 		*dma = res->start;
dma               700 sound/soc/ti/davinci-i2s.c 		dma_data->filter_data = dma;
dma              2108 sound/soc/ti/davinci-mcasp.c 	int *dma;
dma              2248 sound/soc/ti/davinci-mcasp.c 	dma = &mcasp->dma_request[SNDRV_PCM_STREAM_PLAYBACK];
dma              2251 sound/soc/ti/davinci-mcasp.c 		*dma = res->start;
dma              2253 sound/soc/ti/davinci-mcasp.c 		*dma = pdata->tx_dma_channel;
dma              2259 sound/soc/ti/davinci-mcasp.c 		dma_data->filter_data = dma;
dma              2270 sound/soc/ti/davinci-mcasp.c 		dma = &mcasp->dma_request[SNDRV_PCM_STREAM_CAPTURE];
dma              2273 sound/soc/ti/davinci-mcasp.c 			*dma = res->start;
dma              2275 sound/soc/ti/davinci-mcasp.c 			*dma = pdata->rx_dma_channel;
dma              2281 sound/soc/ti/davinci-mcasp.c 			dma_data->filter_data = dma;
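The davinci entries above stash a platform DMA request number and point dma_data->filter_data at it; a dmaengine filter function later consumes that value when the PCM layer requests the channel. A hedged sketch of the consuming side follows; the filter shown is generic and only illustrates the calling convention, since real platforms supply their own controller-specific filter.

    #include <linux/dmaengine.h>

    static bool example_dma_filter(struct dma_chan *chan, void *param)
    {
    	int requested_line = *(int *)param;

    	/*
    	 * A real filter inspects controller-private state; comparing
    	 * against chan_id here is only to show the calling convention.
    	 */
    	return chan->chan_id == requested_line;
    }

    static struct dma_chan *example_request_by_line(int *dma_request_line)
    {
    	dma_cap_mask_t mask;

    	dma_cap_zero(mask);
    	dma_cap_set(DMA_SLAVE, mask);

    	/* filter_data (the int *) becomes the filter's second argument */
    	return dma_request_channel(mask, example_dma_filter,
    				   dma_request_line);
    }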
dma               307 sound/sparc/dbri.c 	struct dbri_dma *dma;	/* Pointer to our DMA block */
dma               670 sound/sparc/dbri.c 	if (dbri->cmdptr - dbri->dma->cmd + len < DBRI_NO_CMDS - 2)
dma               673 sound/sparc/dbri.c 		return dbri->dma->cmd;
dma               700 sound/sparc/dbri.c 	addr = dvma_addr + (cmd - len - dbri->dma->cmd) * sizeof(s32);
dma               717 sound/sparc/dbri.c 		for (ptr = dbri->dma->cmd; ptr < cmd+2; ptr++)
dma               777 sound/sparc/dbri.c 	dbri->dma->intr[0] = dma_addr;
dma               783 sound/sparc/dbri.c 	cmd = dbri->cmdptr = dbri->dma->cmd;
dma               850 sound/sparc/dbri.c 			dbri->dma->desc[desc].ba = 0;
dma               851 sound/sparc/dbri.c 			dbri->dma->desc[desc].nda = 0;
dma              1140 sound/sparc/dbri.c 			dbri->dma->desc[desc].ba = 0;
dma              1141 sound/sparc/dbri.c 			dbri->dma->desc[desc].nda = 0;
dma              1154 sound/sparc/dbri.c 			if (!dbri->dma->desc[desc].ba)
dma              1172 sound/sparc/dbri.c 		dbri->dma->desc[desc].ba = dvma_buffer;
dma              1173 sound/sparc/dbri.c 		dbri->dma->desc[desc].nda = 0;
dma              1176 sound/sparc/dbri.c 			dbri->dma->desc[desc].word1 = DBRI_TD_CNT(mylen);
dma              1177 sound/sparc/dbri.c 			dbri->dma->desc[desc].word4 = 0;
dma              1178 sound/sparc/dbri.c 			dbri->dma->desc[desc].word1 |= DBRI_TD_F | DBRI_TD_B;
dma              1180 sound/sparc/dbri.c 			dbri->dma->desc[desc].word1 = 0;
dma              1181 sound/sparc/dbri.c 			dbri->dma->desc[desc].word4 =
dma              1189 sound/sparc/dbri.c 			dbri->dma->desc[last_desc].nda =
dma              1204 sound/sparc/dbri.c 	dbri->dma->desc[last_desc].nda =
dma              1214 sound/sparc/dbri.c 			dbri->dma->desc[desc].word1,
dma              1215 sound/sparc/dbri.c 			dbri->dma->desc[desc].ba,
dma              1216 sound/sparc/dbri.c 			dbri->dma->desc[desc].nda, dbri->dma->desc[desc].word4);
dma              1793 sound/sparc/dbri.c 		status = DBRI_TD_STATUS(dbri->dma->desc[td].word4);
dma              1799 sound/sparc/dbri.c 		dbri->dma->desc[td].word4 = 0;	/* Reset it for next time. */
dma              1800 sound/sparc/dbri.c 		info->offset += DBRI_RD_CNT(dbri->dma->desc[td].word1);
dma              1824 sound/sparc/dbri.c 	status = dbri->dma->desc[rd].word1;
dma              1825 sound/sparc/dbri.c 	dbri->dma->desc[rd].word1 = 0;	/* Reset it for next time. */
dma              1883 sound/sparc/dbri.c 			dbri->dma->desc[td].word4 = 0;
dma              1917 sound/sparc/dbri.c 	while ((x = dbri->dma->intr[dbri->dbri_irqp]) != 0) {
dma              1918 sound/sparc/dbri.c 		dbri->dma->intr[dbri->dbri_irqp] = 0;
dma              2540 sound/sparc/dbri.c 	dbri->dma = dma_alloc_coherent(&op->dev, sizeof(struct dbri_dma),
dma              2542 sound/sparc/dbri.c 	if (!dbri->dma)
dma              2546 sound/sparc/dbri.c 		dbri->dma, dbri->dma_dvma);
dma              2555 sound/sparc/dbri.c 				  (void *)dbri->dma, dbri->dma_dvma);
dma              2565 sound/sparc/dbri.c 				  (void *)dbri->dma, dbri->dma_dvma);
dma              2591 sound/sparc/dbri.c 	if (dbri->dma)
dma              2594 sound/sparc/dbri.c 				  (void *)dbri->dma, dbri->dma_dvma);
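The dbri entries above keep the command ring, interrupt queue and transfer descriptors in one coherent block obtained from dma_alloc_coherent() and released with dma_free_coherent() on each error path. A minimal sketch of that allocate/free pairing is below; the descriptor structure is hypothetical.

    #include <linux/types.h>
    #include <linux/dma-mapping.h>

    struct example_desc_block {
    	u32 cmd[64];
    	u32 intr[64];
    };

    static struct example_desc_block *example_alloc_descs(struct device *dev,
    						      dma_addr_t *dvma)
    {
    	/*
    	 * Coherent memory: CPU and device see each other's writes without
    	 * explicit cache maintenance, which is what a command/descriptor
    	 * ring needs.
    	 */
    	return dma_alloc_coherent(dev, sizeof(struct example_desc_block),
    				  dvma, GFP_KERNEL);
    }

    static void example_free_descs(struct device *dev,
    			       struct example_desc_block *block,
    			       dma_addr_t dvma)
    {
    	if (block)
    		dma_free_coherent(dev, sizeof(*block), block, dvma);
    }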
dma               120 sound/usb/misc/ua101.c 			dma_addr_t dma;
dma              1064 sound/usb/misc/ua101.c 					   &stream->buffers[i].dma);
dma              1087 sound/usb/misc/ua101.c 				  stream->buffers[i].dma);
dma              1100 sound/usb/misc/ua101.c 		dma_addr_t dma = stream->buffers[b].dma;
dma              1113 sound/usb/misc/ua101.c 			urb->urb.transfer_dma = dma;
dma              1124 sound/usb/misc/ua101.c 			dma += max_packet_size;
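The ua101 entries above allocate stream buffers with usb_alloc_coherent(), record the returned DMA address, and program it into urb->transfer_dma so the USB core skips its own mapping. A sketch of that pattern follows (a bulk URB is shown for brevity); the pipe, size and callback are hypothetical.

    #include <linux/usb.h>

    static void example_urb_complete(struct urb *urb)
    {
    	/* completion runs in interrupt context */
    }

    static int example_prepare_urb(struct usb_device *udev, struct urb *urb,
    			       unsigned int pipe, size_t size)
    {
    	dma_addr_t dma;
    	void *buf;

    	buf = usb_alloc_coherent(udev, size, GFP_KERNEL, &dma);
    	if (!buf)
    		return -ENOMEM;

    	usb_fill_bulk_urb(urb, udev, pipe, buf, size,
    			  example_urb_complete, NULL);
    	urb->transfer_dma = dma;
    	/* tell the USB core the buffer is already DMA-mapped */
    	urb->transfer_flags |= URB_NO_TRANSFER_DMA_MAP;
    	return 0;
    }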
dma                84 tools/include/uapi/linux/if_link.h 	__u8	dma;
dma              1428 tools/testing/nvdimm/test/nfit.c static void *__test_alloc(struct nfit_test *t, size_t size, dma_addr_t *dma,
dma              1436 tools/testing/nvdimm/test/nfit.c 	if (!buf || !nfit_res || !*dma)
dma              1445 tools/testing/nvdimm/test/nfit.c 	nfit_res->res.start = *dma;
dma              1446 tools/testing/nvdimm/test/nfit.c 	nfit_res->res.end = *dma + size - 1;
dma              1456 tools/testing/nvdimm/test/nfit.c 	if (*dma && size >= DIMM_SIZE)
dma              1457 tools/testing/nvdimm/test/nfit.c 		gen_pool_free(nfit_pool, *dma, size);
dma              1464 tools/testing/nvdimm/test/nfit.c static void *test_alloc(struct nfit_test *t, size_t size, dma_addr_t *dma)
dma              1472 tools/testing/nvdimm/test/nfit.c 		*dma = gen_pool_alloc_algo(nfit_pool, size,
dma              1475 tools/testing/nvdimm/test/nfit.c 		*dma = (unsigned long) buf;
dma              1476 tools/testing/nvdimm/test/nfit.c 	return __test_alloc(t, size, dma, buf);
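The nfit test entries above hand out fake resource addresses from a genalloc pool through a dma_addr_t pointer and return the carve-out to the pool on the error path. A rough sketch of that pool usage, with hypothetical geometry, is below.

    #include <linux/genalloc.h>

    static struct gen_pool *example_make_pool(unsigned long base, size_t span)
    {
    	/* 4 KiB allocation granule; -1 = no NUMA node preference */
    	struct gen_pool *pool = gen_pool_create(12, -1);

    	if (!pool)
    		return NULL;

    	if (gen_pool_add(pool, base, span, -1)) {
    		gen_pool_destroy(pool);
    		return NULL;
    	}
    	return pool;
    }

    static int example_carve(struct gen_pool *pool, size_t size, dma_addr_t *dma)
    {
    	unsigned long addr = gen_pool_alloc(pool, size);

    	if (!addr)
    		return -ENOMEM;
    	*dma = addr;	/* handed back as the fake resource address */
    	return 0;
    }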