vram_node   29 drivers/gpu/drm/etnaviv/etnaviv_gem.h  struct drm_mm_node vram_node;
vram_node  134 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  etnaviv_iommu_unmap(context, mapping->vram_node.start,
vram_node  136 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  drm_mm_remove_node(&mapping->vram_node);
vram_node  166 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  if (!free->vram_node.mm)
vram_node  177 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  if (drm_mm_scan_add_block(&scan, &free->vram_node)) {
vram_node  186 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  BUG_ON(drm_mm_scan_remove_block(&scan, &m->vram_node));
vram_node  197 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  if (!drm_mm_scan_remove_block(&scan, &m->vram_node))
vram_node  256 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  node = &mapping->vram_node;
vram_node  292 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  if (mapping->vram_node.mm == &context->mm)
vram_node  378 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  struct drm_mm_node *node = &mapping->vram_node;
vram_node  410 drivers/gpu/drm/etnaviv/etnaviv_mmu.c  struct drm_mm_node *node = &mapping->vram_node;
vram_node   27 drivers/gpu/drm/msm/msm_gem.c          return (((dma_addr_t)msm_obj->vram_node->start) << PAGE_SHIFT) +
vram_node   34 drivers/gpu/drm/msm/msm_gem.c          return !msm_obj->vram_node;
vram_node   91 drivers/gpu/drm/msm/msm_gem.c          ret = drm_mm_insert_node(&priv->vram.mm, msm_obj->vram_node, npages);
vram_node  154 drivers/gpu/drm/msm/msm_gem.c          drm_mm_remove_node(msm_obj->vram_node);
vram_node 1065 drivers/gpu/drm/msm/msm_gem.c          to_msm_bo(obj)->vram_node = &vma->node;
vram_node   81 drivers/gpu/drm/msm/msm_gem.h          struct drm_mm_node *vram_node;
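The hits above follow the usual drm_mm_node lifecycle: a node is embedded in (etnaviv) or pointed to from (msm) the driver's buffer object, reserved with drm_mm_insert_node(), its .start used to derive a device address, and released with drm_mm_remove_node(). Below is a minimal sketch of that pattern, not the drivers' actual code; struct example_vram, example_pin() and example_unpin() are hypothetical names, while the drm_mm_* calls are the real kernel API seen in the listing.

	/*
	 * Illustrative sketch of the drm_mm_node usage mirrored by the
	 * msm_gem.c hits (lines 27, 91, 154): reserve a page-granular
	 * range in a VRAM carveout, compute a bus address from
	 * node->start, and give the range back when done.
	 */
	#include <drm/drm_mm.h>

	struct example_vram {			/* hypothetical container */
		struct drm_mm mm;		/* allocator over the carveout */
		phys_addr_t base;		/* carveout base address */
	};

	static int example_pin(struct example_vram *vram, struct drm_mm_node *node,
			       unsigned int npages, dma_addr_t *iova)
	{
		int ret;

		/* Reserve npages allocator units, cf. msm_gem.c:91. */
		ret = drm_mm_insert_node(&vram->mm, node, npages);
		if (ret)
			return ret;

		/* node->start is in pages here, cf. msm_gem.c:27. */
		*iova = vram->base + ((dma_addr_t)node->start << PAGE_SHIFT);
		return 0;
	}

	static void example_unpin(struct drm_mm_node *node)
	{
		/* Release the range, cf. msm_gem.c:154 / etnaviv_mmu.c:136. */
		if (drm_mm_node_allocated(node))
			drm_mm_remove_node(node);
	}

The etnaviv hits in etnaviv_mmu.c (lines 166-197) additionally use the drm_mm_scan_add_block()/drm_mm_scan_remove_block() eviction-scan API to free up address space when a plain insert fails; that path is driver specific and not reproduced in the sketch.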