Search for refs:dma_buff (Results 1 – 6 of 6), sorted by relevance

/linux-4.1.27/drivers/net/ethernet/cirrus/
cs89x0.c
153 unsigned char *dma_buff; /* points to the beginning of the buffer */ member
452 memcpy(skb_put(skb, length - semi_cnt), lp->dma_buff, in dma_rx()
475 if (lp->dma_buff) { in release_dma_buff()
476 free_pages((unsigned long)(lp->dma_buff), in release_dma_buff()
478 lp->dma_buff = NULL; in release_dma_buff()
886 lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL, in net_open()
888 if (!lp->dma_buff) { in net_open()
895 (unsigned long)lp->dma_buff, in net_open()
896 (unsigned long)isa_virt_to_bus(lp->dma_buff)); in net_open()
897 if ((unsigned long)lp->dma_buff >= MAX_DMA_ADDRESS || in net_open()
[all …]
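
The cs89x0 hits above show the classic ISA-DMA receive-buffer pattern: net_open() takes the buffer from the DMA zone with __get_dma_pages(), checks it against MAX_DMA_ADDRESS, and release_dma_buff() returns it with free_pages(). A minimal sketch of that pattern follows; the struct, the function names and the order parameter are illustrative placeholders, not the driver's own code.

/*
 * Sketch only, not cs89x0.c: allocate an ISA-reachable DMA bounce buffer
 * and release it.  "my_priv", "my_alloc_dma_buff" and "order" are
 * illustrative stand-ins.
 */
#include <linux/errno.h>
#include <linux/gfp.h>
#include <asm/dma.h>

struct my_priv {
	unsigned char *dma_buff;	/* start of the receive bounce buffer */
};

static int my_alloc_dma_buff(struct my_priv *lp, unsigned int order)
{
	lp->dma_buff = (unsigned char *)__get_dma_pages(GFP_KERNEL, order);
	if (!lp->dma_buff)
		return -ENOMEM;

	/* ISA DMA can only reach memory below MAX_DMA_ADDRESS. */
	if ((unsigned long)lp->dma_buff >= MAX_DMA_ADDRESS) {
		free_pages((unsigned long)lp->dma_buff, order);
		lp->dma_buff = NULL;
		return -ENOMEM;
	}
	return 0;
}

static void my_release_dma_buff(struct my_priv *lp, unsigned int order)
{
	if (lp->dma_buff) {
		free_pages((unsigned long)lp->dma_buff, order);
		lp->dma_buff = NULL;
	}
}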
/linux-4.1.27/drivers/net/ethernet/intel/i40evf/
i40e_adminq.c
689 struct i40e_dma_mem *dma_buff = NULL; in i40evf_asq_send_command() local
775 dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]); in i40evf_asq_send_command()
777 memcpy(dma_buff->va, buff, buff_size); in i40evf_asq_send_command()
784 cpu_to_le32(upper_32_bits(dma_buff->pa)); in i40evf_asq_send_command()
786 cpu_to_le32(lower_32_bits(dma_buff->pa)); in i40evf_asq_send_command()
820 memcpy(buff, dma_buff->va, buff_size); in i40evf_asq_send_command()
/linux-4.1.27/drivers/net/ethernet/intel/i40e/
i40e_adminq.c
738 struct i40e_dma_mem *dma_buff = NULL; in i40e_asq_send_command() local
824 dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]); in i40e_asq_send_command()
826 memcpy(dma_buff->va, buff, buff_size); in i40e_asq_send_command()
833 cpu_to_le32(upper_32_bits(dma_buff->pa)); in i40e_asq_send_command()
835 cpu_to_le32(lower_32_bits(dma_buff->pa)); in i40e_asq_send_command()
869 memcpy(buff, dma_buff->va, buff_size); in i40e_asq_send_command()
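Both adminq hits (i40evf and i40e) show the same send-command pattern: the caller's buffer is copied into a pre-allocated DMA region, and that region's bus address is written into the descriptor as two little-endian 32-bit halves; on completion the response is copied back out of the same region. A hedged sketch of the descriptor-fill step, with illustrative struct and function names standing in for the driver's i40e_dma_mem and descriptor types:

/*
 * Sketch only, not the i40e/i40evf code: stage a command buffer in DMA
 * memory and publish its bus address as two __le32 halves.
 * "fake_dma_mem", "fake_desc" and "fill_data_desc" are made-up names.
 */
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct fake_dma_mem {
	void *va;	/* CPU virtual address of the DMA buffer */
	dma_addr_t pa;	/* bus address the hardware will use */
};

struct fake_desc {
	__le32 addr_high;
	__le32 addr_low;
};

static void fill_data_desc(struct fake_desc *desc, struct fake_dma_mem *dma_buff,
			   const void *buff, size_t buff_size)
{
	/* Stage the caller's data in the pre-allocated DMA buffer. */
	memcpy(dma_buff->va, buff, buff_size);

	/* Publish the 64-bit bus address as little-endian high/low words. */
	desc->addr_high = cpu_to_le32(upper_32_bits(dma_buff->pa));
	desc->addr_low  = cpu_to_le32(lower_32_bits(dma_buff->pa));
}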
/linux-4.1.27/drivers/video/fbdev/
pxafb.h
116 struct pxafb_dma_buff *dma_buff; member
pxafb.c
521 fbi->palette_cpu = (u16 *)&fbi->dma_buff->palette[0]; in pxafb_set_par()
1071 dma_desc = &fbi->dma_buff->dma_desc[dma]; in setup_frame_dma()
1082 pal_desc = &fbi->dma_buff->pal_desc[pal]; in setup_frame_dma()
1132 dma_desc = &fbi->dma_buff->dma_desc[DMA_CMD]; in setup_smart_dma()
1300 fbi->smart_cmds = (uint16_t *) fbi->dma_buff->cmd_buff; in pxafb_smart_init()
2165 fbi->dma_buff = dma_alloc_coherent(fbi->dev, fbi->dma_buff_size, in pxafb_probe()
2167 if (fbi->dma_buff == NULL) { in pxafb_probe()
2252 fbi->dma_buff, fbi->dma_buff_phys); in pxafb_probe()
2290 fbi->dma_buff, fbi->dma_buff_phys); in pxafb_remove()
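The pxafb hits show a single coherent allocation backing the DMA descriptors, palette and smart-panel command buffer: dma_alloc_coherent() in pxafb_probe() returns both a CPU pointer (dma_buff) and a bus address (dma_buff_phys), and the error and remove paths free it with dma_free_coherent() using the same size and handle. A minimal sketch of that pairing, with made-up struct and function names:

/*
 * Sketch only, not pxafb.c: one coherent allocation provides both the CPU
 * view and the device view of the buffer; the free must use the same
 * device, size and handle.  "fake_fbi" and the helpers are illustrative.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct fake_fbi {
	struct device *dev;
	size_t dma_buff_size;
	void *dma_buff;			/* CPU view: descriptors, palette, cmds */
	dma_addr_t dma_buff_phys;	/* device view of the same memory */
};

static int fake_alloc_dma_buff(struct fake_fbi *fbi)
{
	fbi->dma_buff = dma_alloc_coherent(fbi->dev, fbi->dma_buff_size,
					   &fbi->dma_buff_phys, GFP_KERNEL);
	if (!fbi->dma_buff)
		return -ENOMEM;
	return 0;
}

static void fake_free_dma_buff(struct fake_fbi *fbi)
{
	dma_free_coherent(fbi->dev, fbi->dma_buff_size,
			  fbi->dma_buff, fbi->dma_buff_phys);
}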
/linux-4.1.27/drivers/net/ethernet/ti/
netcp_core.c
604 dma_addr_t dma_desc, dma_buff; in netcp_process_one_rx_packet() local
620 get_pkt_info(&dma_buff, &buf_len, &dma_desc, desc); in netcp_process_one_rx_packet()
630 dma_unmap_single(netcp->dev, dma_buff, buf_len, DMA_FROM_DEVICE); in netcp_process_one_rx_packet()
653 get_pkt_info(&dma_buff, &buf_len, &dma_desc, ndesc); in netcp_process_one_rx_packet()
656 if (likely(dma_buff && buf_len && page)) { in netcp_process_one_rx_packet()
657 dma_unmap_page(netcp->dev, dma_buff, PAGE_SIZE, in netcp_process_one_rx_packet()
661 (void *)dma_buff, buf_len, page); in netcp_process_one_rx_packet()
666 offset_in_page(dma_buff), buf_len, PAGE_SIZE); in netcp_process_one_rx_packet()
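
The netcp hits show the receive-completion side of streaming DMA: get_pkt_info() recovers the buffer's DMA address and length from the completed descriptor, then the linear buffer is released with dma_unmap_single() and page fragments with dma_unmap_page(). A small illustrative sketch of that unmap step; the helper name is hypothetical.

/*
 * Sketch only, not netcp_core.c: unmap a completed receive buffer.  The
 * unmap call must match how the buffer was mapped (dma_map_single() for
 * the linear buffer, dma_map_page() for fragments).
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/types.h>
#include <asm/page.h>

static void fake_unmap_rx_buf(struct device *dev, dma_addr_t dma_buff,
			      unsigned int buf_len, bool is_page_frag)
{
	if (is_page_frag)
		dma_unmap_page(dev, dma_buff, PAGE_SIZE, DMA_FROM_DEVICE);
	else
		dma_unmap_single(dev, dma_buff, buf_len, DMA_FROM_DEVICE);
}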