dma_buff          155 drivers/net/ethernet/cirrus/cs89x0.c 	unsigned char *dma_buff;	/* points to the beginning of the buffer */
dma_buff          454 drivers/net/ethernet/cirrus/cs89x0.c 		skb_put_data(skb, lp->dma_buff, length - semi_cnt);
dma_buff          476 drivers/net/ethernet/cirrus/cs89x0.c 	if (lp->dma_buff) {
dma_buff          477 drivers/net/ethernet/cirrus/cs89x0.c 		free_pages((unsigned long)(lp->dma_buff),
dma_buff          479 drivers/net/ethernet/cirrus/cs89x0.c 		lp->dma_buff = NULL;
dma_buff          887 drivers/net/ethernet/cirrus/cs89x0.c 		lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL,
dma_buff          889 drivers/net/ethernet/cirrus/cs89x0.c 		if (!lp->dma_buff) {
dma_buff          896 drivers/net/ethernet/cirrus/cs89x0.c 			 (unsigned long)lp->dma_buff,
dma_buff          897 drivers/net/ethernet/cirrus/cs89x0.c 			 (unsigned long)isa_virt_to_bus(lp->dma_buff));
dma_buff          898 drivers/net/ethernet/cirrus/cs89x0.c 		if ((unsigned long)lp->dma_buff >= MAX_DMA_ADDRESS ||
dma_buff          899 drivers/net/ethernet/cirrus/cs89x0.c 		    !dma_page_eq(lp->dma_buff,
dma_buff          900 drivers/net/ethernet/cirrus/cs89x0.c 				 lp->dma_buff + lp->dmasize * 1024 - 1)) {
dma_buff          904 drivers/net/ethernet/cirrus/cs89x0.c 		memset(lp->dma_buff, 0, lp->dmasize * 1024);	/* Why? */
dma_buff          911 drivers/net/ethernet/cirrus/cs89x0.c 		lp->rx_dma_ptr = lp->dma_buff;
dma_buff          912 drivers/net/ethernet/cirrus/cs89x0.c 		lp->end_dma_buff = lp->dma_buff + lp->dmasize * 1024;
dma_buff          917 drivers/net/ethernet/cirrus/cs89x0.c 		set_dma_addr(dev->dma, isa_virt_to_bus(lp->dma_buff));
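
In cs89x0.c, dma_buff is an ISA-DMA receive ring: the open path allocates DMA-capable low pages, verifies the buffer sits below MAX_DMA_ADDRESS and does not cross an ISA DMA page boundary (dma_page_eq() is a driver-local check), programs the ISA DMA controller with the buffer's bus address, and the release path frees the pages. Below is a minimal sketch of that allocate/program flow, not the driver's code; the helper name, private struct and the fixed 16 KiB size are assumptions.

/* Sketch only: ISA-DMA RX buffer setup in the style of the cs89x0 hits above. */
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <asm/dma.h>
#include <asm/io.h>

#define RX_DMA_SIZE_KB	16		/* assumed buffer size, 16 KiB */

struct my_priv {			/* hypothetical private struct */
	unsigned char *dma_buff;	/* start of the RX ring */
	unsigned char *end_dma_buff;	/* one past the end of the ring */
	unsigned char *rx_dma_ptr;	/* current read position */
	int dma;			/* ISA DMA channel number */
};

static int setup_rx_dma(struct my_priv *lp)	/* hypothetical helper */
{
	unsigned long flags;

	/* __get_dma_pages() allocates pages from the ISA-reachable DMA zone */
	lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL,
					get_order(RX_DMA_SIZE_KB * 1024));
	if (!lp->dma_buff)
		return -ENOMEM;

	/* ISA DMA cannot address memory at or above MAX_DMA_ADDRESS */
	if ((unsigned long)lp->dma_buff >= MAX_DMA_ADDRESS) {
		free_pages((unsigned long)lp->dma_buff,
			   get_order(RX_DMA_SIZE_KB * 1024));
		lp->dma_buff = NULL;
		return -ENOMEM;
	}

	lp->rx_dma_ptr = lp->dma_buff;
	lp->end_dma_buff = lp->dma_buff + RX_DMA_SIZE_KB * 1024;

	/* program the ISA DMA controller with the bus address of the ring */
	flags = claim_dma_lock();
	disable_dma(lp->dma);
	clear_dma_ff(lp->dma);
	set_dma_mode(lp->dma, DMA_MODE_READ);
	set_dma_addr(lp->dma, isa_virt_to_bus(lp->dma_buff));
	set_dma_count(lp->dma, RX_DMA_SIZE_KB * 1024);
	enable_dma(lp->dma);
	release_dma_lock(flags);
	return 0;
}
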
dma_buff          737 drivers/net/ethernet/intel/i40e/i40e_adminq.c 	struct i40e_dma_mem *dma_buff = NULL;
dma_buff          825 drivers/net/ethernet/intel/i40e/i40e_adminq.c 		dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]);
dma_buff          827 drivers/net/ethernet/intel/i40e/i40e_adminq.c 		memcpy(dma_buff->va, buff, buff_size);
dma_buff          834 drivers/net/ethernet/intel/i40e/i40e_adminq.c 				cpu_to_le32(upper_32_bits(dma_buff->pa));
dma_buff          836 drivers/net/ethernet/intel/i40e/i40e_adminq.c 				cpu_to_le32(lower_32_bits(dma_buff->pa));
dma_buff          870 drivers/net/ethernet/intel/i40e/i40e_adminq.c 			memcpy(buff, dma_buff->va, buff_size);
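
In i40e_adminq.c, dma_buff is the pre-allocated bounce buffer (struct i40e_dma_mem, holding a kernel-virtual va and a DMA address pa) for the next admin send queue slot: the caller's payload is copied into dma_buff->va, the descriptor's address fields receive the split 64-bit dma_buff->pa, and on completion the response is copied back out of dma_buff->va. A rough sketch of the attach step follows; the struct and field names below are simplified stand-ins, not the i40e definitions.

/* Sketch of the indirect-buffer attach pattern seen in the i40e hits above. */
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct my_dma_mem {			/* stands in for struct i40e_dma_mem */
	void *va;			/* kernel virtual address of the buffer */
	dma_addr_t pa;			/* DMA address the hardware will use */
	u32 size;
};

struct my_aq_desc {			/* hypothetical descriptor layout */
	__le16 flags;
	__le16 datalen;
	__le32 addr_high;
	__le32 addr_low;
};

static void attach_indirect_buffer(struct my_aq_desc *desc,
				   struct my_dma_mem *dma_buff,
				   const void *buff, u16 buff_size)
{
	/* stage the command payload in DMA-visible memory */
	memcpy(dma_buff->va, buff, buff_size);
	desc->datalen = cpu_to_le16(buff_size);

	/* hand the hardware the 64-bit DMA address, split across two LE32 fields */
	desc->addr_high = cpu_to_le32(upper_32_bits(dma_buff->pa));
	desc->addr_low = cpu_to_le32(lower_32_bits(dma_buff->pa));
}

On completion the driver copies the reply the other way, memcpy(buff, dma_buff->va, buff_size), as the hit at i40e_adminq.c line 870 shows.
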
dma_buff          638 drivers/net/ethernet/intel/iavf/iavf_adminq.c 	struct iavf_dma_mem *dma_buff = NULL;
dma_buff          727 drivers/net/ethernet/intel/iavf/iavf_adminq.c 		dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use];
dma_buff          729 drivers/net/ethernet/intel/iavf/iavf_adminq.c 		memcpy(dma_buff->va, buff, buff_size);
dma_buff          736 drivers/net/ethernet/intel/iavf/iavf_adminq.c 				cpu_to_le32(upper_32_bits(dma_buff->pa));
dma_buff          738 drivers/net/ethernet/intel/iavf/iavf_adminq.c 				cpu_to_le32(lower_32_bits(dma_buff->pa));
dma_buff          772 drivers/net/ethernet/intel/iavf/iavf_adminq.c 			memcpy(buff, dma_buff->va, buff_size);
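
The iavf_adminq.c hits mirror the i40e pattern above one-for-one: dma_buff is the iavf_dma_mem bound to the next ASQ slot, the payload is staged in dma_buff->va, the descriptor carries upper_32_bits()/lower_32_bits() of dma_buff->pa, and the reply is copied back from dma_buff->va after the command completes, so the sketch after the i40e group applies here unchanged apart from the structure name.
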
dma_buff          647 drivers/net/ethernet/ti/netcp_core.c 	dma_addr_t dma_desc, dma_buff;
dma_buff          663 drivers/net/ethernet/ti/netcp_core.c 	get_pkt_info(&dma_buff, &buf_len, &dma_desc, desc);
dma_buff          677 drivers/net/ethernet/ti/netcp_core.c 	dma_unmap_single(netcp->dev, dma_buff, buf_len, DMA_FROM_DEVICE);
dma_buff          700 drivers/net/ethernet/ti/netcp_core.c 		get_pkt_info(&dma_buff, &buf_len, &dma_desc, ndesc);
dma_buff          706 drivers/net/ethernet/ti/netcp_core.c 		if (likely(dma_buff && buf_len && page)) {
dma_buff          707 drivers/net/ethernet/ti/netcp_core.c 			dma_unmap_page(netcp->dev, dma_buff, PAGE_SIZE,
dma_buff          711 drivers/net/ethernet/ti/netcp_core.c 				&dma_buff, buf_len, page);
dma_buff          716 drivers/net/ethernet/ti/netcp_core.c 				offset_in_page(dma_buff), buf_len, PAGE_SIZE);
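
In netcp_core.c, dma_buff is a dma_addr_t pulled out of a completed RX hardware descriptor by the driver-local get_pkt_info() helper; the head buffer is unmapped with dma_unmap_single() and each fragment page with dma_unmap_page(), after which the page is attached to the skb starting at offset_in_page(dma_buff). A hedged sketch of the per-fragment step; the function name is hypothetical.

/* Sketch of unmapping one RX fragment and attaching it to an skb,
 * following the netcp hits above; my_rx_attach_frag is a made-up name. */
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/skbuff.h>

static void my_rx_attach_frag(struct device *dev, struct sk_buff *skb,
			      struct page *page, dma_addr_t dma_buff,
			      unsigned int buf_len)
{
	/* the page was mapped for device-to-CPU traffic when it was posted */
	dma_unmap_page(dev, dma_buff, PAGE_SIZE, DMA_FROM_DEVICE);

	/* payload starts at the DMA address's offset within the page;
	 * the whole page is charged to the skb's truesize */
	skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, page,
			offset_in_page(dma_buff), buf_len, PAGE_SIZE);
}
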
dma_buff          773 drivers/usb/dwc2/gadget.c 						 dma_addr_t dma_buff,
dma_buff          802 drivers/usb/dwc2/gadget.c 			(*desc)->buf = dma_buff + offset;
dma_buff          817 drivers/usb/dwc2/gadget.c 			(*desc)->buf = dma_buff + offset;
dma_buff          838 drivers/usb/dwc2/gadget.c 						 dma_addr_t dma_buff,
dma_buff          853 drivers/usb/dwc2/gadget.c 			dma_buff, len, true);
dma_buff          880 drivers/usb/dwc2/gadget.c 				      dma_addr_t dma_buff, unsigned int len)
dma_buff          911 drivers/usb/dwc2/gadget.c 	desc->buf = dma_buff;
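
In dwc2/gadget.c, dma_buff is the DMA address of a request buffer being described to the controller in descriptor-DMA mode: the fill helpers walk a chain of DMA descriptors, pointing each descriptor's buf field at dma_buff plus a running offset and capping each chunk at the per-descriptor limit. A minimal sketch of that chaining idea follows; the descriptor layout, MAX_DESC_LEN and fill_desc_chain() are assumptions, not the dwc2 definitions.

/* Sketch of splitting one mapped buffer across a descriptor chain. */
#include <linux/kernel.h>
#include <linux/types.h>

#define MAX_DESC_LEN	(64 * 1024)	/* assumed per-descriptor byte limit */

struct my_dma_desc {			/* simplified descriptor */
	u32 status;			/* real hardware packs length/flags here */
	u32 buf;			/* DMA address of this chunk */
};

static unsigned int fill_desc_chain(struct my_dma_desc *desc,
				    dma_addr_t dma_buff, unsigned int len)
{
	unsigned int offset = 0;
	unsigned int n = 0;

	while (offset < len) {
		unsigned int chunk = min_t(unsigned int, len - offset, MAX_DESC_LEN);

		/* each descriptor points into the same buffer at a running offset */
		desc[n].buf = lower_32_bits(dma_buff + offset);
		desc[n].status = chunk;
		offset += chunk;
		n++;
	}
	return n;			/* descriptors consumed for this request */
}
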
dma_buff          525 drivers/video/fbdev/pxafb.c 	fbi->palette_cpu = (u16 *)&fbi->dma_buff->palette[0];
dma_buff         1075 drivers/video/fbdev/pxafb.c 	dma_desc = &fbi->dma_buff->dma_desc[dma];
dma_buff         1086 drivers/video/fbdev/pxafb.c 		pal_desc = &fbi->dma_buff->pal_desc[pal];
dma_buff         1136 drivers/video/fbdev/pxafb.c 	dma_desc = &fbi->dma_buff->dma_desc[DMA_CMD];
dma_buff         1302 drivers/video/fbdev/pxafb.c 	fbi->smart_cmds = (uint16_t *) fbi->dma_buff->cmd_buff;
dma_buff         2321 drivers/video/fbdev/pxafb.c 	fbi->dma_buff = dma_alloc_coherent(fbi->dev, fbi->dma_buff_size,
dma_buff         2323 drivers/video/fbdev/pxafb.c 	if (fbi->dma_buff == NULL) {
dma_buff         2403 drivers/video/fbdev/pxafb.c 			fbi->dma_buff, fbi->dma_buff_phys);
dma_buff         2428 drivers/video/fbdev/pxafb.c 	dma_free_wc(&dev->dev, fbi->dma_buff_size, fbi->dma_buff,
dma_buff          116 drivers/video/fbdev/pxafb.h 	struct pxafb_dma_buff	*dma_buff;
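
In pxafb, dma_buff is one coherent allocation (struct pxafb_dma_buff, declared in pxafb.h) that holds the LCD DMA descriptors, the palette, the palette descriptors and the smart-panel command buffer; probe allocates it with dma_alloc_coherent() and records both the CPU pointer and dma_buff_phys, the frame-setup code indexes dma_desc[]/pal_desc[] through the CPU pointer, and teardown frees the same region. A small sketch of that coherent allocate/index/free pattern; the reduced struct layout and helper names are assumptions.

/* Sketch of a coherent descriptor block in the style of the pxafb hits above. */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct my_lcd_desc {			/* one descriptor as the LCD controller reads it */
	u32 fdadr;			/* DMA address of the next descriptor */
	u32 fsadr;			/* DMA address of the frame data */
	u32 fidr;
	u32 ldcmd;
};

struct my_dma_block {			/* stands in for struct pxafb_dma_buff */
	struct my_lcd_desc dma_desc[2];
	u16 palette[256];
};

struct my_fbi {
	struct device *dev;
	struct my_dma_block *dma_buff;	/* CPU view of the block */
	dma_addr_t dma_buff_phys;	/* device view of the same block */
};

static int my_fb_alloc_descs(struct my_fbi *fbi)
{
	fbi->dma_buff = dma_alloc_coherent(fbi->dev, sizeof(*fbi->dma_buff),
					   &fbi->dma_buff_phys, GFP_KERNEL);
	if (!fbi->dma_buff)
		return -ENOMEM;

	/* the CPU fills descriptors through dma_buff; the controller is pointed
	 * at dma_buff_phys plus the offset of the descriptor inside the block */
	fbi->dma_buff->dma_desc[0].fdadr =
		lower_32_bits(fbi->dma_buff_phys +
			      offsetof(struct my_dma_block, dma_desc[0]));
	return 0;
}

static void my_fb_free_descs(struct my_fbi *fbi)
{
	dma_free_coherent(fbi->dev, sizeof(*fbi->dma_buff),
			  fbi->dma_buff, fbi->dma_buff_phys);
}
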