dmem 151 drivers/gpu/drm/nouveau/nouveau_dmem.c if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr,
dmem 167 drivers/gpu/drm/nouveau/nouveau_dmem.c struct nouveau_dmem *dmem = page_to_dmem(vmf->page);
dmem 168 drivers/gpu/drm/nouveau/nouveau_dmem.c struct nouveau_drm *drm = dmem->drm;
dmem 195 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_fence_new(dmem->migrate.chan, false, &fence);
dmem 215 drivers/gpu/drm/nouveau/nouveau_dmem.c if (drm->dmem == NULL)
dmem 218 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 219 drivers/gpu/drm/nouveau/nouveau_dmem.c chunk = list_first_entry_or_null(&drm->dmem->chunk_empty,
dmem 223 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 228 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 246 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 248 drivers/gpu/drm/nouveau/nouveau_dmem.c list_add(&chunk->list, &drm->dmem->chunk_empty);
dmem 250 drivers/gpu/drm/nouveau/nouveau_dmem.c list_add_tail(&chunk->list, &drm->dmem->chunk_empty);
dmem 251 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 261 drivers/gpu/drm/nouveau/nouveau_dmem.c chunk = list_first_entry_or_null(&drm->dmem->chunk_free,
dmem 267 drivers/gpu/drm/nouveau/nouveau_dmem.c chunk = list_first_entry_or_null(&drm->dmem->chunk_empty,
dmem 287 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 293 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 300 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 317 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 353 drivers/gpu/drm/nouveau/nouveau_dmem.c if (drm->dmem == NULL)
dmem 356 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 357 drivers/gpu/drm/nouveau/nouveau_dmem.c list_for_each_entry (chunk, &drm->dmem->chunk_free, list) {
dmem 362 drivers/gpu/drm/nouveau/nouveau_dmem.c list_for_each_entry (chunk, &drm->dmem->chunk_full, list) {
dmem 367 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 375 drivers/gpu/drm/nouveau/nouveau_dmem.c if (drm->dmem == NULL)
dmem 378 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 379 drivers/gpu/drm/nouveau/nouveau_dmem.c list_for_each_entry (chunk, &drm->dmem->chunk_free, list) {
dmem 382 drivers/gpu/drm/nouveau/nouveau_dmem.c list_for_each_entry (chunk, &drm->dmem->chunk_full, list) {
dmem 385 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 393 drivers/gpu/drm/nouveau/nouveau_dmem.c if (drm->dmem == NULL)
dmem 396 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_lock(&drm->dmem->mutex);
dmem 398 drivers/gpu/drm/nouveau/nouveau_dmem.c WARN_ON(!list_empty(&drm->dmem->chunk_free));
dmem 399 drivers/gpu/drm/nouveau/nouveau_dmem.c WARN_ON(!list_empty(&drm->dmem->chunk_full));
dmem 401 drivers/gpu/drm/nouveau/nouveau_dmem.c list_for_each_entry_safe (chunk, tmp, &drm->dmem->chunk_empty, list) {
dmem 410 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_unlock(&drm->dmem->mutex);
dmem 418 drivers/gpu/drm/nouveau/nouveau_dmem.c struct nouveau_channel *chan = drm->dmem->migrate.chan;
dmem 480 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem->migrate.copy_func = nvc0b5_migrate_copy;
dmem 481 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem->migrate.chan = drm->ttm.chan;
dmem 501 drivers/gpu/drm/nouveau/nouveau_dmem.c if (!(drm->dmem = kzalloc(sizeof(*drm->dmem), GFP_KERNEL)))
dmem 504 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem->drm = drm;
dmem 505 drivers/gpu/drm/nouveau/nouveau_dmem.c mutex_init(&drm->dmem->mutex);
dmem 506 drivers/gpu/drm/nouveau/nouveau_dmem.c INIT_LIST_HEAD(&drm->dmem->chunk_free);
dmem 507 drivers/gpu/drm/nouveau/nouveau_dmem.c INIT_LIST_HEAD(&drm->dmem->chunk_full);
dmem 508 drivers/gpu/drm/nouveau/nouveau_dmem.c INIT_LIST_HEAD(&drm->dmem->chunk_empty);
dmem 526 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem->pagemap.type = MEMORY_DEVICE_PRIVATE;
dmem 527 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem->pagemap.res = *res;
dmem 528 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem->pagemap.ops = &nouveau_dmem_pagemap_ops;
dmem 529 drivers/gpu/drm/nouveau/nouveau_dmem.c if (IS_ERR(devm_memremap_pages(device, &drm->dmem->pagemap)))
dmem 546 drivers/gpu/drm/nouveau/nouveau_dmem.c list_add_tail(&chunk->list, &drm->dmem->chunk_empty);
dmem 556 drivers/gpu/drm/nouveau/nouveau_dmem.c kfree(drm->dmem);
dmem 557 drivers/gpu/drm/nouveau/nouveau_dmem.c drm->dmem = NULL;
dmem 578 drivers/gpu/drm/nouveau/nouveau_dmem.c if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_VRAM,
dmem 607 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_fence_new(drm->dmem->migrate.chan, false, &fence);
dmem 675 drivers/gpu/drm/nouveau/nouveau_dmem.c return is_device_private_page(page) && drm->dmem == page_to_dmem(page);

dmem 215 drivers/gpu/drm/nouveau/nouveau_drv.h struct nouveau_dmem *dmem;
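
The nouveau_dmem.c hits above trace a recurring pattern: a mutex-protected pool whose chunks move between chunk_empty, chunk_free, and chunk_full lists as device-private pages are handed out, with chunk_free tried first and chunk_empty as the fallback (lines 261 and 267). A minimal sketch of that allocation step, under the assumption that this is how the lists are used; dmem_chunk, dmem_pool, and dmem_pool_pick are illustrative names, not the driver's:

#include <linux/list.h>
#include <linux/mutex.h>

struct dmem_chunk {
	struct list_head list;
	/* per-chunk page bookkeeping would live here */
};

struct dmem_pool {
	struct mutex mutex;
	struct list_head chunk_free;	/* populated, pages still available */
	struct list_head chunk_full;	/* populated, no pages left */
	struct list_head chunk_empty;	/* backing VRAM not yet populated */
};

/*
 * Pick a chunk to allocate from, preferring chunks whose backing
 * memory is already populated; the caller moves an exhausted chunk
 * to chunk_full and returns chunks with freed pages to chunk_free.
 */
static struct dmem_chunk *dmem_pool_pick(struct dmem_pool *pool)
{
	struct dmem_chunk *chunk;

	mutex_lock(&pool->mutex);
	chunk = list_first_entry_or_null(&pool->chunk_free,
					 struct dmem_chunk, list);
	if (!chunk)
		chunk = list_first_entry_or_null(&pool->chunk_empty,
						 struct dmem_chunk, list);
	if (chunk)
		list_del_init(&chunk->list);
	mutex_unlock(&pool->mutex);

	return chunk;
}
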
dmem 185 drivers/memory/brcmstb_dpfe.c void __iomem *dmem;
dmem 372 drivers/memory/brcmstb_dpfe.c ptr = priv->dmem + offset;
dmem 525 drivers/memory/brcmstb_dpfe.c u32 __iomem *dmem = priv->dmem;
dmem 542 drivers/memory/brcmstb_dpfe.c sum += readl_relaxed(dmem + i);
dmem 583 drivers/memory/brcmstb_dpfe.c const u32 *dmem, *imem;
dmem 631 drivers/memory/brcmstb_dpfe.c dmem = fw_blob + imem_size;
dmem 633 drivers/memory/brcmstb_dpfe.c ret = __write_firmware(priv->dmem, dmem, dmem_size, is_big_endian);
dmem 831 drivers/memory/brcmstb_dpfe.c priv->dmem = devm_ioremap_resource(dev, res);
dmem 832 drivers/memory/brcmstb_dpfe.c if (IS_ERR(priv->dmem)) {

dmem 880 drivers/net/ethernet/cavium/thunder/nicvf_main.c cqe_head &= (cq->dmem.q_len - 1);

dmem 53 drivers/net/ethernet/cavium/thunder/nicvf_queues.c static int nicvf_alloc_q_desc_mem(struct nicvf *nic, struct q_desc_mem *dmem,
dmem 56 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->q_len = q_len;
dmem 57 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->size = (desc_size * q_len) + align_bytes;
dmem 59 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->unalign_base = dma_alloc_coherent(&nic->pdev->dev, dmem->size,
dmem 60 drivers/net/ethernet/cavium/thunder/nicvf_queues.c &dmem->dma, GFP_KERNEL);
dmem 61 drivers/net/ethernet/cavium/thunder/nicvf_queues.c if (!dmem->unalign_base)
dmem 65 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->phys_base = NICVF_ALIGNED_ADDR((u64)dmem->dma, align_bytes);
dmem 66 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->base = dmem->unalign_base + (dmem->phys_base - dmem->dma);
dmem 71 drivers/net/ethernet/cavium/thunder/nicvf_queues.c static void nicvf_free_q_desc_mem(struct nicvf *nic, struct q_desc_mem *dmem)
dmem 73 drivers/net/ethernet/cavium/thunder/nicvf_queues.c if (!dmem)
dmem 76 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dma_free_coherent(&nic->pdev->dev, dmem->size,
dmem 77 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->unalign_base, dmem->dma);
dmem 78 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->unalign_base = NULL;
dmem 79 drivers/net/ethernet/cavium/thunder/nicvf_queues.c dmem->base = NULL;
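
nicvf_alloc_q_desc_mem() above over-allocates the coherent buffer by align_bytes so that an aligned descriptor base can be carved out of it, then offsets the CPU pointer by the same amount the bus address was rounded up. A hedged sketch of that pattern, using the generic ALIGN() helper where the driver uses its NICVF_ALIGNED_ADDR() macro; alloc_q_desc_mem here is an illustrative name:

#include <linux/dma-mapping.h>
#include <linux/kernel.h>

struct q_desc_mem {
	void *unalign_base;	/* as returned by dma_alloc_coherent() */
	void *base;		/* first aligned descriptor (CPU view) */
	dma_addr_t dma;		/* bus address of unalign_base */
	u64 phys_base;		/* bus address rounded up to align_bytes */
	size_t size;
	u32 q_len;
};

/* align_bytes must be a power of two for ALIGN() to be valid. */
static int alloc_q_desc_mem(struct device *dev, struct q_desc_mem *dmem,
			    u32 q_len, size_t desc_size, u32 align_bytes)
{
	dmem->q_len = q_len;
	dmem->size = desc_size * q_len + align_bytes;

	dmem->unalign_base = dma_alloc_coherent(dev, dmem->size,
						&dmem->dma, GFP_KERNEL);
	if (!dmem->unalign_base)
		return -ENOMEM;

	/* Round the bus address up, then shift the CPU pointer equally. */
	dmem->phys_base = ALIGN((u64)dmem->dma, align_bytes);
	dmem->base = dmem->unalign_base + (dmem->phys_base - dmem->dma);
	return 0;
}
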
dmem 260 drivers/net/ethernet/cavium/thunder/nicvf_queues.c err = nicvf_alloc_q_desc_mem(nic, &rbdr->dmem, ring_len,
dmem 266 drivers/net/ethernet/cavium/thunder/nicvf_queues.c rbdr->desc = rbdr->dmem.base;
dmem 329 drivers/net/ethernet/cavium/thunder/nicvf_queues.c if (!rbdr->dmem.base)
dmem 345 drivers/net/ethernet/cavium/thunder/nicvf_queues.c head &= (rbdr->dmem.q_len - 1);
dmem 374 drivers/net/ethernet/cavium/thunder/nicvf_queues.c nicvf_free_q_desc_mem(nic, &rbdr->dmem);
dmem 415 drivers/net/ethernet/cavium/thunder/nicvf_queues.c tail &= (rbdr->dmem.q_len - 1);
dmem 480 drivers/net/ethernet/cavium/thunder/nicvf_queues.c err = nicvf_alloc_q_desc_mem(nic, &cq->dmem, q_len, CMP_QUEUE_DESC_SIZE,
dmem 485 drivers/net/ethernet/cavium/thunder/nicvf_queues.c cq->desc = cq->dmem.base;
dmem 496 drivers/net/ethernet/cavium/thunder/nicvf_queues.c if (!cq->dmem.base)
dmem 499 drivers/net/ethernet/cavium/thunder/nicvf_queues.c nicvf_free_q_desc_mem(nic, &cq->dmem);
dmem 508 drivers/net/ethernet/cavium/thunder/nicvf_queues.c err = nicvf_alloc_q_desc_mem(nic, &sq->dmem, q_len, SND_QUEUE_DESC_SIZE,
dmem 513 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->desc = sq->dmem.base;
dmem 562 drivers/net/ethernet/cavium/thunder/nicvf_queues.c hdr_sqe &= (sq->dmem.q_len - 1);
dmem 580 drivers/net/ethernet/cavium/thunder/nicvf_queues.c if (!sq->dmem.base)
dmem 585 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->dmem.q_len * TSO_HEADER_SIZE,
dmem 619 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->head &= (sq->dmem.q_len - 1);
dmem 623 drivers/net/ethernet/cavium/thunder/nicvf_queues.c nicvf_free_q_desc_mem(nic, &sq->dmem);
dmem 840 drivers/net/ethernet/cavium/thunder/nicvf_queues.c qidx, (u64)(cq->dmem.phys_base));
dmem 889 drivers/net/ethernet/cavium/thunder/nicvf_queues.c qidx, (u64)(sq->dmem.phys_base));
dmem 929 drivers/net/ethernet/cavium/thunder/nicvf_queues.c qidx, (u64)(rbdr->dmem.phys_base));
dmem 1128 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->tail &= (sq->dmem.q_len - 1);
dmem 1149 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->head &= (sq->dmem.q_len - 1);
dmem 1155 drivers/net/ethernet/cavium/thunder/nicvf_queues.c qentry &= (sq->dmem.q_len - 1);
dmem 1424 drivers/net/ethernet/cavium/thunder/nicvf_queues.c qentry &= (sq->dmem.q_len - 1);

dmem 229 drivers/net/ethernet/cavium/thunder/nicvf_queues.h struct q_desc_mem dmem;
dmem 260 drivers/net/ethernet/cavium/thunder/nicvf_queues.h struct q_desc_mem dmem;
dmem 284 drivers/net/ethernet/cavium/thunder/nicvf_queues.h struct q_desc_mem dmem;

dmem 1938 drivers/staging/media/ipu3/ipu3-abi.h } dmem;

dmem 45 drivers/staging/media/ipu3/ipu3-css-fw.h } dmem;
dmem 58 drivers/staging/media/ipu3/ipu3-css-fw.h } dmem;
dmem 66 drivers/staging/media/ipu3/ipu3-css-fw.h } dmem;

dmem 2832 drivers/staging/media/ipu3/ipu3-css-params.c &pofs->dmem.tnr3,
dmem 2845 drivers/staging/media/ipu3/ipu3-css-params.c m, &pofs->dmem.xnr3, sizeof(*xnr_dmem));

dmem 729 drivers/staging/media/ipu3/ipu3-css.c &cofs->dmem.iterator,
dmem 795 drivers/staging/media/ipu3/ipu3-css.c &cofs->dmem.ref,
dmem 825 drivers/staging/media/ipu3/ipu3-css.c &cofs->dmem.dvs, sizeof(*cfg_dvs),
dmem 841 drivers/staging/media/ipu3/ipu3-css.c &cofs->dmem.tnr3,
dmem 872 drivers/staging/media/ipu3/ipu3-css.c &sofs->dmem.ref,
dmem 885 drivers/staging/media/ipu3/ipu3-css.c &sofs->dmem.tnr3,
dmem 1960 drivers/staging/media/ipu3/ipu3-css.c abi_buf->payload.s3a.data.dmem.s3a_tbl = b->daddr;
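
The nicvf_queues.c hits above repeatedly mask head, tail, and qentry indices with dmem.q_len - 1. That replaces a modulo with a bitwise AND, which is only correct when q_len is a power of two. A minimal illustration of the trick; ring_advance is a hypothetical helper, not a driver function:

#include <linux/log2.h>
#include <linux/types.h>

/*
 * Valid only when is_power_of_2(q_len): q_len - 1 is then an
 * all-ones mask, and the AND is a cheap (idx + 1) % q_len.
 */
static inline u32 ring_advance(u32 idx, u32 q_len)
{
	return (idx + 1) & (q_len - 1);
}
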
dmem 200 drivers/usb/host/imx21-dbg.c struct imx21_dmem_area *dmem;
dmem 215 drivers/usb/host/imx21-dbg.c list_for_each_entry(dmem, &imx21->dmem_list, list) {
dmem 216 drivers/usb/host/imx21-dbg.c dmem_bytes += dmem->size;
dmem 253 drivers/usb/host/imx21-dbg.c struct imx21_dmem_area *dmem;
dmem 259 drivers/usb/host/imx21-dbg.c list_for_each_entry(dmem, &imx21->dmem_list, list)
dmem 263 drivers/usb/host/imx21-dbg.c dmem->offset, dmem->size,
dmem 264 drivers/usb/host/imx21-dbg.c format_ep(dmem->ep, ep_text, sizeof(ep_text)));

dmem 234 drivers/usb/host/imx21-hcd.c void __iomem *dmem = imx21->regs + USBOTG_DMEM + dmem_offset;
dmem 244 drivers/usb/host/imx21-hcd.c writel(word, dmem);
dmem 245 drivers/usb/host/imx21-hcd.c dmem += 4;
dmem 251 drivers/usb/host/imx21-hcd.c writel(word, dmem);
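
The imx21-hcd.c hits at lines 234-251 belong to a routine that copies a buffer into the controller's on-chip data memory through MMIO, one 32-bit word at a time. A simplified sketch of that word-packing loop; it assumes a word-aligned start offset (the real routine also handles unaligned ones) and dmem_copy is a stand-in name:

#include <linux/io.h>
#include <linux/types.h>

static void dmem_copy(void __iomem *dmem, const u8 *src, int count)
{
	u32 word = 0;
	int byte = 0;

	while (count--) {
		/* pack source bytes into a 32-bit word, LSB first */
		word |= (u32)*src++ << (8 * byte++);
		if (byte == 4) {
			writel(word, dmem);
			dmem += 4;
			word = 0;
			byte = 0;
		}
	}
	if (byte)
		writel(word, dmem);	/* flush the trailing partial word */
}
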