dma_list 50 arch/powerpc/platforms/cell/spufs/spu_restore.c unsigned int list = (unsigned int)&dma_list[0];
dma_list 51 arch/powerpc/platforms/cell/spufs/spu_restore.c unsigned int size = sizeof(dma_list);
dma_list 50 arch/powerpc/platforms/cell/spufs/spu_save.c unsigned int list = (unsigned int)&dma_list[0];
dma_list 51 arch/powerpc/platforms/cell/spufs/spu_save.c unsigned int size = sizeof(dma_list);
dma_list 38 arch/powerpc/platforms/cell/spufs/spu_utils.h struct dma_list_elem dma_list[15] __attribute__ ((aligned(8)));
dma_list 92 arch/powerpc/platforms/cell/spufs/spu_utils.h dma_list[i].size = 16384;
dma_list 93 arch/powerpc/platforms/cell/spufs/spu_utils.h dma_list[i].ea_low = ea_low;
dma_list 672 drivers/block/rsxx/dma.c struct list_head dma_list[RSXX_MAX_TARGETS];
dma_list 690 drivers/block/rsxx/dma.c INIT_LIST_HEAD(&dma_list[i]);
dma_list 701 drivers/block/rsxx/dma.c st = rsxx_queue_discard(card, &dma_list[tgt], laddr,
dma_list 723 drivers/block/rsxx/dma.c st = rsxx_queue_dma(card, &dma_list[tgt],
dma_list 741 drivers/block/rsxx/dma.c if (!list_empty(&dma_list[i])) {
dma_list 744 drivers/block/rsxx/dma.c list_splice_tail(&dma_list[i], &card->ctrl[i].queue);
dma_list 756 drivers/block/rsxx/dma.c rsxx_cleanup_dma_queue(&card->ctrl[i], &dma_list[i],
dma_list 247 drivers/infiniband/core/umem_odp.c umem_odp->dma_list = kvcalloc(
dma_list 248 drivers/infiniband/core/umem_odp.c pages, sizeof(*umem_odp->dma_list), GFP_KERNEL);
dma_list 249 drivers/infiniband/core/umem_odp.c if (!umem_odp->dma_list) {
dma_list 277 drivers/infiniband/core/umem_odp.c kvfree(umem_odp->dma_list);
dma_list 458 drivers/infiniband/core/umem_odp.c kvfree(umem_odp->dma_list);
dma_list 523 drivers/infiniband/core/umem_odp.c if (!(umem_odp->dma_list[page_index])) {
dma_list 531 drivers/infiniband/core/umem_odp.c umem_odp->dma_list[page_index] = dma_addr | access_mask;
dma_list 535 drivers/infiniband/core/umem_odp.c umem_odp->dma_list[page_index] |= access_mask;
dma_list 737 drivers/infiniband/core/umem_odp.c dma_addr_t dma = umem_odp->dma_list[idx];
dma_list 759 drivers/infiniband/core/umem_odp.c umem_odp->dma_list[idx] = 0;
dma_list 147 drivers/infiniband/hw/mlx5/mem.c to_ib_umem_odp(umem)->dma_list[offset + i];
dma_list 288 drivers/infiniband/hw/mlx5/odp.c if (umem_odp->dma_list[idx] &
dma_list 201 drivers/infiniband/hw/mthca/mthca_allocator.c u64 *dma_list = NULL;
dma_list 222 drivers/infiniband/hw/mthca/mthca_allocator.c dma_list = kmalloc_array(npages, sizeof(*dma_list),
dma_list 224 drivers/infiniband/hw/mthca/mthca_allocator.c if (!dma_list)
dma_list 228 drivers/infiniband/hw/mthca/mthca_allocator.c dma_list[i] = t + i * (1 << shift);
dma_list 234 drivers/infiniband/hw/mthca/mthca_allocator.c dma_list = kmalloc_array(npages, sizeof(*dma_list),
dma_list 236 drivers/infiniband/hw/mthca/mthca_allocator.c if (!dma_list)
dma_list 255 drivers/infiniband/hw/mthca/mthca_allocator.c dma_list[i] = t;
dma_list 263 drivers/infiniband/hw/mthca/mthca_allocator.c dma_list, shift, npages,
dma_list 271 drivers/infiniband/hw/mthca/mthca_allocator.c kfree(dma_list);
dma_list 279 drivers/infiniband/hw/mthca/mthca_allocator.c kfree(dma_list);
dma_list 471 drivers/infiniband/hw/mthca/mthca_eq.c u64 *dma_list = NULL;
dma_list 490 drivers/infiniband/hw/mthca/mthca_eq.c dma_list = kmalloc_array(npages, sizeof(*dma_list), GFP_KERNEL);
dma_list 491 drivers/infiniband/hw/mthca/mthca_eq.c if (!dma_list)
dma_list 505 drivers/infiniband/hw/mthca/mthca_eq.c dma_list[i] = t;
dma_list 519 drivers/infiniband/hw/mthca/mthca_eq.c dma_list, PAGE_SHIFT, npages,
dma_list 551 drivers/infiniband/hw/mthca/mthca_eq.c kfree(dma_list);
dma_list 582 drivers/infiniband/hw/mthca/mthca_eq.c kfree(dma_list);
dma_list 171 drivers/misc/genwqe/card_base.h dma_addr_t *dma_list; /* list of dma addresses per page */
dma_list 372 drivers/misc/genwqe/card_base.h dma_addr_t *dma_list);
dma_list 956 drivers/misc/genwqe/card_dev.c &m->dma_list[page_offs]);
dma_list 226 drivers/misc/genwqe/card_utils.c static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list,
dma_list 232 drivers/misc/genwqe/card_utils.c for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) {
dma_list 233 drivers/misc/genwqe/card_utils.c pci_unmap_page(pci_dev, dma_list[i],
dma_list 235 drivers/misc/genwqe/card_utils.c dma_list[i] = 0x0;
dma_list 241 drivers/misc/genwqe/card_utils.c dma_addr_t *dma_list)
dma_list 250 drivers/misc/genwqe/card_utils.c dma_list[i] = 0x0;
dma_list 263 drivers/misc/genwqe/card_utils.c dma_list[i] = daddr;
dma_list 268 drivers/misc/genwqe/card_utils.c genwqe_unmap_pages(cd, dma_list, num_pages);
dma_list 372 drivers/misc/genwqe/card_utils.c dma_addr_t *dma_list)
dma_list 407 drivers/misc/genwqe/card_utils.c daddr = dma_list[p] + map_offs;
dma_list 597 drivers/misc/genwqe/card_utils.c m->dma_list = (dma_addr_t *)(m->page_list + m->nr_pages);
dma_list 614 drivers/misc/genwqe/card_utils.c rc = genwqe_map_pages(cd, m->page_list, m->nr_pages, m->dma_list);
dma_list 626 drivers/misc/genwqe/card_utils.c m->dma_list = NULL;
dma_list 649 drivers/misc/genwqe/card_utils.c if (m->dma_list)
dma_list 650 drivers/misc/genwqe/card_utils.c genwqe_unmap_pages(cd, m->dma_list, m->nr_pages);
dma_list 657 drivers/misc/genwqe/card_utils.c m->dma_list = NULL;
dma_list 976 drivers/net/ethernet/mellanox/mlx4/eq.c u64 *dma_list = NULL;
dma_list 997 drivers/net/ethernet/mellanox/mlx4/eq.c dma_list = kmalloc_array(npages, sizeof(*dma_list), GFP_KERNEL);
dma_list 998 drivers/net/ethernet/mellanox/mlx4/eq.c if (!dma_list)
dma_list 1014 drivers/net/ethernet/mellanox/mlx4/eq.c dma_list[i] = t;
dma_list 1032 drivers/net/ethernet/mellanox/mlx4/eq.c err = mlx4_write_mtt(dev, &eq->mtt, 0, npages, dma_list);
dma_list 1052 drivers/net/ethernet/mellanox/mlx4/eq.c kfree(dma_list);
dma_list 1082 drivers/net/ethernet/mellanox/mlx4/eq.c kfree(dma_list);
dma_list 68 drivers/vfio/vfio_iommu_type1.c struct rb_root dma_list;
dma_list 137 drivers/vfio/vfio_iommu_type1.c struct rb_node *node = iommu->dma_list.rb_node;
dma_list 155 drivers/vfio/vfio_iommu_type1.c struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL;
dma_list 169 drivers/vfio/vfio_iommu_type1.c rb_insert_color(&new->node, &iommu->dma_list);
dma_list 174 drivers/vfio/vfio_iommu_type1.c rb_erase(&old->node, &iommu->dma_list);
dma_list 1197 drivers/vfio/vfio_iommu_type1.c n = rb_first(&iommu->dma_list);
dma_list 1858 drivers/vfio/vfio_iommu_type1.c while ((node = rb_first(&iommu->dma_list)))
dma_list 1866 drivers/vfio/vfio_iommu_type1.c n = rb_first(&iommu->dma_list);
dma_list 1889 drivers/vfio/vfio_iommu_type1.c n = rb_first(&iommu->dma_list);
dma_list 2081 drivers/vfio/vfio_iommu_type1.c iommu->dma_list = RB_ROOT;
dma_list 944 drivers/vme/vme.c struct vme_dma_list *dma_list;
dma_list 951 drivers/vme/vme.c dma_list = kmalloc(sizeof(*dma_list), GFP_KERNEL);
dma_list 952 drivers/vme/vme.c if (!dma_list)
dma_list 955 drivers/vme/vme.c INIT_LIST_HEAD(&dma_list->entries);
dma_list 956 drivers/vme/vme.c dma_list->parent = list_entry(resource->entry,
dma_list 959 drivers/vme/vme.c mutex_init(&dma_list->mtx);
dma_list 961 drivers/vme/vme.c return dma_list;
dma_list 56 include/rdma/ib_umem_odp.h dma_addr_t *dma_list;
dma_list 745 sound/pci/maestro3.c struct m3_list dma_list;
dma_list 1743 sound/pci/maestro3.c s->index_list[2] = &chip->dma_list;
dma_list 2170 sound/pci/maestro3.c chip->dma_list.curlen = 0;
dma_list 2171 sound/pci/maestro3.c chip->dma_list.mem_addr = KDATA_DMA_XFER0;
dma_list 2172 sound/pci/maestro3.c chip->dma_list.max = MAX_VIRTUAL_DMA_CHANNELS;
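The drivers/vfio/vfio_iommu_type1.c hits above are the one place where dma_list is not an array or a list_head but an rb_root: lookups walk iommu->dma_list.rb_node (line 137), inserts go through rb_link_node/rb_insert_color (lines 155 and 169), and teardown repeatedly pops rb_first (lines 1858 onward). Below is a minimal sketch of that IOVA-keyed red-black tree pattern against the kernel rbtree API. The *_sketch struct names, their reduced field sets, and the exact overlap test are assumptions for illustration, not a verbatim copy of the driver.

    #include <linux/rbtree.h>
    #include <linux/types.h>

    /* Simplified stand-ins for the driver's types (assumption: only the
     * fields needed for tree maintenance are shown here). */
    struct vfio_iommu_sketch {
    	struct rb_root dma_list;	/* initialized to RB_ROOT */
    };

    struct vfio_dma_sketch {
    	struct rb_node node;
    	dma_addr_t iova;		/* start of the mapping */
    	size_t size;			/* length of the mapping */
    };

    /* Return any mapping overlapping [start, start + size), or NULL. */
    static struct vfio_dma_sketch *find_dma(struct vfio_iommu_sketch *iommu,
    					dma_addr_t start, size_t size)
    {
    	struct rb_node *node = iommu->dma_list.rb_node;

    	while (node) {
    		struct vfio_dma_sketch *dma =
    			rb_entry(node, struct vfio_dma_sketch, node);

    		if (start + size <= dma->iova)
    			node = node->rb_left;
    		else if (start >= dma->iova + dma->size)
    			node = node->rb_right;
    		else
    			return dma;	/* ranges overlap */
    	}
    	return NULL;
    }

    /* Insert a non-overlapping mapping, keeping the tree sorted by IOVA. */
    static void link_dma(struct vfio_iommu_sketch *iommu,
    		     struct vfio_dma_sketch *new)
    {
    	struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL;

    	while (*link) {
    		struct vfio_dma_sketch *dma =
    			rb_entry(*link, struct vfio_dma_sketch, node);

    		parent = *link;
    		if (new->iova + new->size <= dma->iova)
    			link = &(*link)->rb_left;
    		else
    			link = &(*link)->rb_right;
    	}

    	rb_link_node(&new->node, parent, link);
    	rb_insert_color(&new->node, &iommu->dma_list);
    }

Keying the tree on the IOVA range gives O(log n) overlap checks on both the map and unmap paths, which is also why the teardown sites in the listing simply drain rb_first() until the tree is empty.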