dma_mem           382 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	i40iw_free_dma_mem(iwdev->sc_dev.hw, &iwqp->kqp.dma_mem);
dma_mem           461 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	struct i40iw_dma_mem *mem = &iwqp->kqp.dma_mem;
dma_mem           129 drivers/infiniband/hw/i40iw/i40iw_verbs.h 	struct i40iw_dma_mem dma_mem;
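The i40iw hits show the first pattern in this listing: a small per-object DMA descriptor (struct i40iw_dma_mem, embedded in the kernel QP as kqp.dma_mem) that is torn down with i40iw_free_dma_mem() when the QP is destroyed. A minimal sketch of that pattern, assuming the descriptor carries the usual va/pa/size triple; the real i40iw wrappers take the driver's own hw handle rather than a struct device, so the helpers below are illustrative, not the driver's code:

#include <linux/dma-mapping.h>

/* Hedged sketch: a coherent buffer tracked by a va/pa/size descriptor,
 * as the kqp.dma_mem lines suggest. */
struct example_dma_mem {
	void		*va;	/* kernel virtual address */
	dma_addr_t	 pa;	/* DMA address programmed into the HW */
	u32		 size;
};

static int example_alloc_dma_mem(struct device *dev,
				 struct example_dma_mem *mem, u32 size)
{
	mem->size = size;
	mem->va = dma_alloc_coherent(dev, size, &mem->pa, GFP_KERNEL);
	return mem->va ? 0 : -ENOMEM;
}

static void example_free_dma_mem(struct device *dev,
				 struct example_dma_mem *mem)
{
	if (mem->va)
		dma_free_coherent(dev, mem->size, mem->va, mem->pa);
	mem->va = NULL;
}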
dma_mem           132 drivers/net/ethernet/emulex/benet/be.h 	struct be_dma_mem dma_mem;
dma_mem           154 drivers/net/ethernet/emulex/benet/be.h 	return q->dma_mem.va + q->head * q->entry_size;
dma_mem           159 drivers/net/ethernet/emulex/benet/be.h 	return q->dma_mem.va + q->tail * q->entry_size;
dma_mem           164 drivers/net/ethernet/emulex/benet/be.h 	return q->dma_mem.va + index * q->entry_size;
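The be.h hits show the benet ring idiom: one be_dma_mem block backs the whole descriptor ring, and head, tail, or arbitrary slots are reached by indexing dma_mem.va with entry_size. A hedged restatement of those accessors; only the fields visible in the hits are kept, the rest of the layout is an assumption:

#include <linux/dma-mapping.h>

/* Sketch of the queue_head_node()/queue_tail_node()/queue_index_node()
 * style accessors over a coherent ring buffer. */
struct example_queue {
	struct {
		void		*va;
		dma_addr_t	 dma;
		u32		 size;
	} dma_mem;
	u32 len;		/* entries in the ring */
	u32 entry_size;		/* bytes per descriptor */
	u32 head, tail;
};

static inline void *example_queue_head_node(struct example_queue *q)
{
	return q->dma_mem.va + q->head * q->entry_size;
}

static inline void *example_queue_tail_node(struct example_queue *q)
{
	return q->dma_mem.va + q->tail * q->entry_size;
}

static inline void *example_queue_index_node(struct example_queue *q, u32 index)
{
	return q->dma_mem.va + index * q->entry_size;
}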
dma_mem           996 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *q_mem = &eqo->q.dma_mem;
dma_mem          1161 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *q_mem = &cq->dma_mem;
dma_mem          1239 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *q_mem = &mccq->dma_mem;
dma_mem          1304 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *q_mem = &mccq->dma_mem;
dma_mem          1363 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *q_mem = &txq->dma_mem;
dma_mem          1411 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *q_mem = &rxq->dma_mem;
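In be_cmds.c the same dma_mem block is handed to the firmware queue-create commands (EQ, CQ, MCCQ, TXQ, RXQ): before the mailbox request is issued, q_mem is described as a list of 4K page addresses. A hedged reconstruction of that step; the helper name and the lo/hi page layout are assumptions based on these hits, not the driver's exact code:

#include <linux/kernel.h>
#include <linux/sizes.h>

/* Sketch: describe a contiguous coherent buffer as the page-address
 * pairs a queue-create command expects. */
struct example_phys_addr {
	__le32 lo;
	__le32 hi;
};

static void example_cmd_page_addrs_prepare(struct example_phys_addr *pages,
					   u32 max_pages, dma_addr_t dma,
					   u32 size)
{
	u32 npages = min_t(u32, DIV_ROUND_UP(size, SZ_4K), max_pages);
	u32 i;

	for (i = 0; i < npages; i++) {
		pages[i].lo = cpu_to_le32(lower_32_bits(dma));
		pages[i].hi = cpu_to_le32(upper_32_bits(dma));
		dma += SZ_4K;
	}
}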
dma_mem           148 drivers/net/ethernet/emulex/benet/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
dma_mem           160 drivers/net/ethernet/emulex/benet/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
dma_mem          1437 drivers/net/ethernet/emulex/benet/be_main.c 		entry = txo->q.dma_mem.va;
dma_mem          1447 drivers/net/ethernet/emulex/benet/be_main.c 		entry = txo->cq.dma_mem.va;
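be_main.c lines 148 and 160 are the two halves of the backing-buffer lifecycle for such a ring: allocation and free both go through q->dma_mem. A hedged sketch of that pairing, reusing the example_queue layout from the be.h sketch above:

/* Sketch of a be_queue_alloc()/be_queue_free() style pair: the ring's
 * coherent buffer is sized as len * entry_size and released with the
 * same va/dma/size triple it was created with. */
static int example_queue_alloc(struct device *dev, struct example_queue *q,
			       u32 len, u32 entry_size)
{
	q->len = len;
	q->entry_size = entry_size;
	q->head = q->tail = 0;
	q->dma_mem.size = len * entry_size;
	q->dma_mem.va = dma_alloc_coherent(dev, q->dma_mem.size,
					   &q->dma_mem.dma, GFP_KERNEL);
	return q->dma_mem.va ? 0 : -ENOMEM;
}

static void example_queue_free(struct device *dev, struct example_queue *q)
{
	if (q->dma_mem.va) {
		dma_free_coherent(dev, q->dma_mem.size, q->dma_mem.va,
				  q->dma_mem.dma);
		q->dma_mem.va = NULL;
	}
}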
dma_mem          2962 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	u8 *dma_mem;
dma_mem          2992 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	dma_mem = kzalloc(DPAA2_CLASSIFIER_DMA_SIZE, GFP_KERNEL);
dma_mem          2993 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	if (!dma_mem)
dma_mem          2996 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	err = dpni_prepare_key_cfg(&cls_cfg, dma_mem);
dma_mem          3003 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	key_iova = dma_map_single(dev, dma_mem, DPAA2_CLASSIFIER_DMA_SIZE,
dma_mem          3026 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	kfree(dma_mem);
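dpaa2-eth uses a different pattern from the ring drivers above: the classifier key configuration is built in ordinary kzalloc() memory, streaming-mapped with dma_map_single() so the management complex firmware can read it, and unmapped and freed once the command completes. A hedged outline of that flow; dpni_prepare_key_cfg() is the call visible in the hits, while the fill/commit callbacks below stand in for the concrete DPNI commands:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

/* Sketch: build a table in kernel memory, map it for firmware access,
 * hand over the IOVA, then unmap and free. */
static int example_set_cls_table(struct device *dev, size_t tbl_size,
				 int (*fill)(void *buf),
				 int (*commit)(dma_addr_t iova))
{
	dma_addr_t iova;
	u8 *buf;
	int err;

	buf = kzalloc(tbl_size, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	err = fill(buf);		/* e.g. dpni_prepare_key_cfg() */
	if (err)
		goto free_buf;

	iova = dma_map_single(dev, buf, tbl_size, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, iova)) {
		err = -ENOMEM;
		goto free_buf;
	}

	err = commit(iova);		/* firmware command taking the IOVA */

	dma_unmap_single(dev, iova, tbl_size, DMA_TO_DEVICE);
free_buf:
	kfree(buf);
	return err;
}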
dma_mem            31 drivers/scsi/be2iscsi/be.h 	struct be_dma_mem dma_mem;
dma_mem            53 drivers/scsi/be2iscsi/be.h 	return q->dma_mem.va + q->head * q->entry_size;
dma_mem            58 drivers/scsi/be2iscsi/be.h 	return q->dma_mem.va + wrb_num * q->entry_size;
dma_mem            63 drivers/scsi/be2iscsi/be.h 	return q->dma_mem.va + q->tail * q->entry_size;
dma_mem           741 drivers/scsi/be2iscsi/be_cmds.c 	struct be_dma_mem *q_mem = &eq->dma_mem;
dma_mem           783 drivers/scsi/be2iscsi/be_cmds.c 	struct be_dma_mem *q_mem = &cq->dma_mem;
dma_mem           856 drivers/scsi/be2iscsi/be_cmds.c 	struct be_dma_mem *q_mem = &mccq->dma_mem;
dma_mem           982 drivers/scsi/be2iscsi/be_cmds.c 	struct be_dma_mem *q_mem = &dq->dma_mem;
dma_mem          2981 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
dma_mem          3013 drivers/scsi/be2iscsi/be_main.c 		mem = &eq->dma_mem;
dma_mem          3051 drivers/scsi/be2iscsi/be_main.c 		mem = &eq->dma_mem;
dma_mem          3080 drivers/scsi/be2iscsi/be_main.c 		mem = &cq->dma_mem;
dma_mem          3116 drivers/scsi/be2iscsi/be_main.c 		mem = &cq->dma_mem;
dma_mem          3141 drivers/scsi/be2iscsi/be_main.c 	mem = &dq->dma_mem;
dma_mem          3193 drivers/scsi/be2iscsi/be_main.c 	mem = &dataq->dma_mem;
dma_mem          3308 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
dma_mem          3319 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
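The be2iscsi hits mirror the benet code almost line for line: the same head/tail/index accessors over dma_mem.va in be.h, q_mem handed to the EQ/CQ/MCCQ/default-PDU create commands in be_cmds.c, and the alloc/free pair in be_main.c. The sketches above carry over; as a rough illustration of the command side, reusing struct example_queue, struct example_phys_addr and example_cmd_page_addrs_prepare() from the benet sketches (the request layout here is an assumption, not the real wrb format):

/* Sketch: a queue-create request records the page count and page
 * addresses of q->dma_mem before the mailbox is posted. */
struct example_q_create_req {
	__le16 num_pages;
	struct example_phys_addr pages[8];
	/* remaining command fields elided */
};

static void example_fill_q_create(struct example_q_create_req *req,
				  struct example_queue *q)
{
	u32 size = q->len * q->entry_size;

	req->num_pages = cpu_to_le16(DIV_ROUND_UP(size, SZ_4K));
	example_cmd_page_addrs_prepare(req->pages, ARRAY_SIZE(req->pages),
				       q->dma_mem.dma, size);
}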
dma_mem           311 drivers/scsi/stex.c 	void *dma_mem;
dma_mem           411 drivers/scsi/stex.c 	struct req_msg *req = hba->dma_mem + hba->req_head * hba->rq_size;
dma_mem           421 drivers/scsi/stex.c 	return (struct req_msg *)(hba->dma_mem +
dma_mem          1146 drivers/scsi/stex.c 	msg_h = (struct st_msg_header *)hba->dma_mem;
dma_mem          1736 drivers/scsi/stex.c 	hba->dma_mem = dma_alloc_coherent(&pdev->dev,
dma_mem          1738 drivers/scsi/stex.c 	if (!hba->dma_mem) {
dma_mem          1747 drivers/scsi/stex.c 			hba->dma_mem = dma_alloc_coherent(&pdev->dev,
dma_mem          1751 drivers/scsi/stex.c 		if (!hba->dma_mem) {
dma_mem          1768 drivers/scsi/stex.c 		hba->scratch = (__le32 *)(hba->dma_mem + scratch_offset);
dma_mem          1769 drivers/scsi/stex.c 	hba->status_buffer = (struct status_msg *)(hba->dma_mem + sts_offset);
dma_mem          1770 drivers/scsi/stex.c 	hba->copy_buffer = hba->dma_mem + cp_offset;
dma_mem          1839 drivers/scsi/stex.c 			  hba->dma_mem, hba->dma_handle);
dma_mem          1927 drivers/scsi/stex.c 			  hba->dma_mem, hba->dma_handle);
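stex takes yet another approach: one dma_alloc_coherent() region (hba->dma_mem / hba->dma_handle) is carved by fixed offsets into the request ring, the status ring, an optional scratch area and a copy buffer, and the same pointer/handle pair is passed back at teardown (the calls at lines 1839 and 1927). A hedged sketch of the carve-up; the sizes and offsets are placeholders, not the driver's real layout, which depends on the card type:

#include <linux/dma-mapping.h>

/* Sketch: a single coherent block partitioned by offsets, as the
 * hba->dma_mem lines suggest. */
struct example_hba {
	void		*dma_mem;
	dma_addr_t	 dma_handle;
	size_t		 dma_size;
	void		*req_ring;
	void		*status_ring;
	__le32		*scratch;
	void		*copy_buffer;
};

static int example_hba_alloc(struct device *dev, struct example_hba *hba,
			     size_t req_sz, size_t sts_sz,
			     size_t scratch_sz, size_t copy_sz)
{
	size_t sts_off = req_sz;
	size_t scratch_off = sts_off + sts_sz;
	size_t cp_off = scratch_off + scratch_sz;

	hba->dma_size = cp_off + copy_sz;
	hba->dma_mem = dma_alloc_coherent(dev, hba->dma_size,
					  &hba->dma_handle, GFP_KERNEL);
	if (!hba->dma_mem)
		return -ENOMEM;

	hba->req_ring = hba->dma_mem;
	hba->status_ring = hba->dma_mem + sts_off;
	hba->scratch = (__le32 *)(hba->dma_mem + scratch_off);
	hba->copy_buffer = hba->dma_mem + cp_off;
	return 0;
}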
dma_mem           632 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	u8 *dma_mem;
dma_mem           635 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	dma_mem = kzalloc(fdb_dump_size, GFP_KERNEL);
dma_mem           636 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	if (!dma_mem)
dma_mem           639 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	fdb_dump_iova = dma_map_single(dev, dma_mem, fdb_dump_size,
dma_mem           656 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	fdb_entries = (struct fdb_dump_entry *)dma_mem;
dma_mem           671 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	kfree(dma_mem);
dma_mem           678 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	kfree(dma_mem);
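The ethsw FDB dump is the same map-for-firmware pattern as the dpaa2-eth classifier, but in the DMA_FROM_DEVICE direction: a kzalloc() buffer is mapped, the firmware writes FDB entries into it, and after unmapping the buffer is walked as an array of struct fdb_dump_entry. A hedged outline (same headers as the classifier sketch above); the MC command that actually fills the buffer is not visible in these hits, so it appears as a callback:

/* Sketch of the dump flow: map, let the firmware fill, unmap, walk. */
static int example_fdb_dump(struct device *dev, size_t dump_size,
			    int (*dump_cmd)(dma_addr_t iova, u16 *num))
{
	dma_addr_t iova;
	u8 *buf;
	u16 num = 0;
	int err;

	buf = kzalloc(dump_size, GFP_KERNEL);
	if (!buf)
		return -ENOMEM;

	iova = dma_map_single(dev, buf, dump_size, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, iova)) {
		err = -ENOMEM;
		goto free_buf;
	}

	err = dump_cmd(iova, &num);	/* firmware fills the buffer */
	dma_unmap_single(dev, iova, dump_size, DMA_FROM_DEVICE);
	if (!err)
		dev_dbg(dev, "dumped %u FDB entries\n", num);

free_buf:
	kfree(buf);
	return err;
}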
dma_mem          1281 include/linux/device.h 	struct dma_coherent_mem	*dma_mem; /* internal for coherent mem
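The single include/linux/device.h hit is the hook for the kernel/dma/coherent.c code below: struct device carries an optional dma_mem pointer, and when it is set, coherent allocations for that device are satisfied from the declared per-device pool rather than the generic allocator. Drivers typically populate it via dma_declare_coherent_memory(); that function's signature has changed across kernel versions (older kernels took an extra flags argument), so the call below is illustrative rather than exact:

/* Sketch: reserve platform- or device-local memory as the per-device
 * coherent pool at probe time. phys_addr/device_addr/size come from the
 * platform description; on success dev->dma_mem points at the new pool
 * and later dma_alloc_coherent() calls draw from it. */
static int example_declare_pool(struct device *dev, phys_addr_t phys_addr,
				dma_addr_t device_addr, size_t size)
{
	int ret;

	ret = dma_declare_coherent_memory(dev, phys_addr, device_addr, size);
	if (ret)
		dev_err(dev, "failed to declare coherent memory: %d\n", ret);
	return ret;
}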
dma_mem            26 kernel/dma/coherent.c 	if (dev && dev->dma_mem)
dma_mem            27 kernel/dma/coherent.c 		return dev->dma_mem;
dma_mem            44 kernel/dma/coherent.c 	struct dma_coherent_mem *dma_mem = NULL;
dma_mem            60 kernel/dma/coherent.c 	dma_mem = kzalloc(sizeof(struct dma_coherent_mem), GFP_KERNEL);
dma_mem            61 kernel/dma/coherent.c 	if (!dma_mem) {
dma_mem            65 kernel/dma/coherent.c 	dma_mem->bitmap = kzalloc(bitmap_size, GFP_KERNEL);
dma_mem            66 kernel/dma/coherent.c 	if (!dma_mem->bitmap) {
dma_mem            71 kernel/dma/coherent.c 	dma_mem->virt_base = mem_base;
dma_mem            72 kernel/dma/coherent.c 	dma_mem->device_base = device_addr;
dma_mem            73 kernel/dma/coherent.c 	dma_mem->pfn_base = PFN_DOWN(phys_addr);
dma_mem            74 kernel/dma/coherent.c 	dma_mem->size = pages;
dma_mem            75 kernel/dma/coherent.c 	spin_lock_init(&dma_mem->spinlock);
dma_mem            77 kernel/dma/coherent.c 	*mem = dma_mem;
dma_mem            81 kernel/dma/coherent.c 	kfree(dma_mem);
dma_mem           103 kernel/dma/coherent.c 	if (dev->dma_mem)
dma_mem           106 kernel/dma/coherent.c 	dev->dma_mem = mem;
dma_mem           327 kernel/dma/coherent.c 		dev->dma_mem = NULL;
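The coherent.c hits show dma_init_coherent_memory() filling in the pool descriptor (virt_base, device_base, pfn_base, size in pages, bitmap, spinlock) and dma_assign_coherent_memory() attaching it to dev->dma_mem, refusing to overwrite an existing one per the check at line 103. Allocation from such a pool then reduces to a bitmap search; a hedged sketch of that step, consistent with the fields initialised here but not a copy of the real allocation path:

#include <linux/bitmap.h>
#include <linux/spinlock.h>
#include <linux/dma-mapping.h>

/* Sketch: find and reserve a free 2^order page run in the pool's bitmap,
 * then derive the CPU and device addresses from the bases recorded at
 * init time. */
struct example_coherent_mem {
	void		*virt_base;
	dma_addr_t	 device_base;
	unsigned long	 pfn_base;
	int		 size;		/* pool size in pages */
	unsigned long	*bitmap;
	spinlock_t	 spinlock;
};

static void *example_alloc_from_pool(struct example_coherent_mem *mem,
				     int order, dma_addr_t *dma_handle)
{
	unsigned long flags;
	int pageno;

	spin_lock_irqsave(&mem->spinlock, flags);
	pageno = bitmap_find_free_region(mem->bitmap, mem->size, order);
	spin_unlock_irqrestore(&mem->spinlock, flags);
	if (pageno < 0)
		return NULL;

	*dma_handle = mem->device_base + ((dma_addr_t)pageno << PAGE_SHIFT);
	return mem->virt_base + ((size_t)pageno << PAGE_SHIFT);
}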