px_dma 994 drivers/gpu/drm/i915/gem/i915_gem_context.c const dma_addr_t pd_daddr = px_dma(ppgtt->pd);
px_dma 58 drivers/gpu/drm/i915/gt/intel_lrc_reg.h const u64 addr__ = px_dma(ppgtt->pd); \
px_dma 372 drivers/gpu/drm/i915/gvt/scheduler.c px_dma(ppgtt->pd) = mm->ppgtt_mm.shadow_pdps[0];
px_dma 382 drivers/gpu/drm/i915/gvt/scheduler.c px_dma(pd) = mm->ppgtt_mm.shadow_pdps[i];
px_dma 1152 drivers/gpu/drm/i915/gvt/scheduler.c px_dma(ppgtt->pd) = s->i915_context_pml4;
px_dma 1158 drivers/gpu/drm/i915/gvt/scheduler.c px_dma(pd) = s->i915_context_pdps[i];
px_dma 1213 drivers/gpu/drm/i915/gvt/scheduler.c s->i915_context_pml4 = px_dma(ppgtt->pd);
px_dma 1219 drivers/gpu/drm/i915/gvt/scheduler.c s->i915_context_pdps[i] = px_dma(pd);
px_dma 693 drivers/gpu/drm/i915/i915_gem_gtt.c if (!px_dma(&vm->scratch[0])) /* set to 0 on clones */
px_dma 697 drivers/gpu/drm/i915/i915_gem_gtt.c if (!px_dma(&vm->scratch[i]))
px_dma 844 drivers/gpu/drm/i915/i915_gem_gtt.c const u64 daddr = px_dma(ppgtt->pd);
px_dma 1377 drivers/gpu/drm/i915/i915_gem_gtt.c px_dma(&vm->scratch[0]) = 0; /* no xfer of ownership */
px_dma 1386 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pte_encode(px_dma(&vm->scratch[0]),
px_dma 1395 drivers/gpu/drm/i915/i915_gem_gtt.c gen8_pde_encode(px_dma(&vm->scratch[i]),
px_dma 1547 drivers/gpu/drm/i915/i915_gem_gtt.c iowrite32(GEN6_PDE_ADDR_ENCODE(px_dma(pt)) | GEN6_PDE_VALID,
px_dma 1761 drivers/gpu/drm/i915/i915_gem_gtt.c vm->pte_encode(px_dma(&vm->scratch[0]),
px_dma 2872 drivers/gpu/drm/i915/i915_gem_gtt.c ggtt->vm.pte_encode(px_dma(&ggtt->vm.scratch[0]),
px_dma 535 drivers/gpu/drm/i915/i915_gem_gtt.h return px_dma(pt ?: px_base(&ppgtt->vm.scratch[ppgtt->vm.top]));
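
For orientation: the call sites above use px_dma() both to read and to assign the DMA address of a page-table or page-directory structure, and the GVT scheduler (gvt/scheduler.c) swaps those addresses between the host context and the guest's shadow PDPs. Below is a minimal sketch of that pattern with simplified stand-in types; pd_stub, swap_pdps and the px_dma() body shown here are assumptions for illustration, not the driver's real definitions (those live in drivers/gpu/drm/i915/i915_gem_gtt.h).

    /* Simplified stand-ins for illustration only. */
    typedef unsigned long long dma_addr_t;

    struct pd_stub { dma_addr_t daddr; };   /* DMA address of the backing page */
    #define px_dma(px) ((px)->daddr)        /* assumed shape: yields an lvalue */

    /* Pattern seen in gvt/scheduler.c: remember the host context's address,
     * then point the page directory at the guest's shadow PDP instead. */
    static void swap_pdp(struct pd_stub *pd, dma_addr_t *saved, dma_addr_t shadow)
    {
            *saved = px_dma(pd);    /* cf. s->i915_context_pdps[i] = px_dma(pd); */
            px_dma(pd) = shadow;    /* cf. px_dma(pd) = mm->ppgtt_mm.shadow_pdps[i]; */
    }

The remaining hits follow the read-only side of the same idea: the encode helpers (gen8_pte_encode, gen8_pde_encode, GEN6_PDE_ADDR_ENCODE, vm->pte_encode) take px_dma(...) as the physical address to pack into a PTE/PDE.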