Lines Matching refs:ring (drivers/gpu/drm/i915/i915_gem_execbuffer.c)

580 				struct intel_engine_cs *ring,  in i915_gem_execbuffer_reserve_vma()  argument
689 i915_gem_execbuffer_reserve(struct intel_engine_cs *ring, in i915_gem_execbuffer_reserve() argument
698 bool has_fenced_gpu_access = INTEL_INFO(ring->dev)->gen < 4; in i915_gem_execbuffer_reserve()
701 i915_gem_retire_requests_ring(ring); in i915_gem_execbuffer_reserve()
759 ret = i915_gem_execbuffer_reserve_vma(vma, ring, need_relocs); in i915_gem_execbuffer_reserve()
769 ret = i915_gem_execbuffer_reserve_vma(vma, ring, need_relocs); in i915_gem_execbuffer_reserve()
792 struct intel_engine_cs *ring, in i915_gem_execbuffer_relocate_slow() argument
881 ret = i915_gem_execbuffer_reserve(ring, &eb->vmas, ctx, &need_relocs); in i915_gem_execbuffer_relocate_slow()
909 const unsigned other_rings = ~intel_ring_flag(req->ring); in i915_gem_execbuffer_move_to_gpu()
919 ret = i915_gem_object_sync(obj, req->ring, &req); in i915_gem_execbuffer_move_to_gpu()
931 i915_gem_chipset_flush(req->ring->dev); in i915_gem_execbuffer_move_to_gpu()
1018 struct intel_engine_cs *ring, const u32 ctx_id) in i915_gem_validate_context() argument
1023 if (ring->id != RCS && ctx_id != DEFAULT_CONTEXT_HANDLE) in i915_gem_validate_context()
1036 if (i915.enable_execlists && !ctx->engine[ring->id].state) { in i915_gem_validate_context()
1037 int ret = intel_lr_context_deferred_alloc(ctx, ring); in i915_gem_validate_context()
1051 struct intel_engine_cs *ring = i915_gem_request_get_ring(req); in i915_gem_execbuffer_move_to_active() local
1078 struct drm_i915_private *dev_priv = to_i915(ring->dev); in i915_gem_execbuffer_move_to_active()
1092 params->ring->gpu_caches_dirty = true; in i915_gem_execbuffer_retire_commands()
1102 struct intel_engine_cs *ring = req->ring; in i915_reset_gen7_sol_offsets() local
1106 if (!IS_GEN7(dev) || ring != &dev_priv->ring[RCS]) { in i915_reset_gen7_sol_offsets()
1116 intel_ring_emit(ring, MI_LOAD_REGISTER_IMM(1)); in i915_reset_gen7_sol_offsets()
1117 intel_ring_emit(ring, GEN7_SO_WRITE_OFFSET(i)); in i915_reset_gen7_sol_offsets()
1118 intel_ring_emit(ring, 0); in i915_reset_gen7_sol_offsets()
1121 intel_ring_advance(ring); in i915_reset_gen7_sol_offsets()
1127 i915_gem_execbuffer_parse(struct intel_engine_cs *ring, in i915_gem_execbuffer_parse() argument
1139 shadow_batch_obj = i915_gem_batch_pool_get(&ring->batch_pool, in i915_gem_execbuffer_parse()
1144 ret = i915_parse_cmds(ring, in i915_gem_execbuffer_parse()
1185 struct intel_engine_cs *ring = params->ring; in i915_gem_ringbuffer_submission() local
1200 WARN(params->ctx->ppgtt && params->ctx->ppgtt->pd_dirty_rings & (1<<ring->id), in i915_gem_ringbuffer_submission()
1201 "%s didn't clear reload\n", ring->name); in i915_gem_ringbuffer_submission()
1209 if (instp_mode != 0 && ring != &dev_priv->ring[RCS]) { in i915_gem_ringbuffer_submission()
1236 if (ring == &dev_priv->ring[RCS] && in i915_gem_ringbuffer_submission()
1242 intel_ring_emit(ring, MI_NOOP); in i915_gem_ringbuffer_submission()
1243 intel_ring_emit(ring, MI_LOAD_REGISTER_IMM(1)); in i915_gem_ringbuffer_submission()
1244 intel_ring_emit(ring, INSTPM); in i915_gem_ringbuffer_submission()
1245 intel_ring_emit(ring, instp_mask << 16 | instp_mode); in i915_gem_ringbuffer_submission()
1246 intel_ring_advance(ring); in i915_gem_ringbuffer_submission()
1261 ret = ring->dispatch_execbuffer(params->request, in i915_gem_ringbuffer_submission()
1300 file_priv->bsd_ring = &dev_priv->ring[ring_id]; in gen8_dispatch_bsd_ring()
1335 struct intel_engine_cs *ring; in i915_gem_do_execbuffer() local
1376 ring = &dev_priv->ring[RCS]; in i915_gem_do_execbuffer()
1384 ring = &dev_priv->ring[ring_id]; in i915_gem_do_execbuffer()
1387 ring = &dev_priv->ring[VCS]; in i915_gem_do_execbuffer()
1390 ring = &dev_priv->ring[VCS2]; in i915_gem_do_execbuffer()
1398 ring = &dev_priv->ring[VCS]; in i915_gem_do_execbuffer()
1400 ring = &dev_priv->ring[(args->flags & I915_EXEC_RING_MASK) - 1]; in i915_gem_do_execbuffer()
1402 if (!intel_ring_initialized(ring)) { in i915_gem_do_execbuffer()
1418 if (ring->id != RCS) { in i915_gem_do_execbuffer()
1420 ring->name); in i915_gem_do_execbuffer()
1433 ctx = i915_gem_validate_context(dev, file, ring, ctx_id); in i915_gem_do_execbuffer()
1467 ret = i915_gem_execbuffer_reserve(ring, &eb->vmas, ctx, &need_relocs); in i915_gem_do_execbuffer()
1476 ret = i915_gem_execbuffer_relocate_slow(dev, args, file, ring, in i915_gem_do_execbuffer()
1492 if (i915_needs_cmd_parser(ring) && args->batch_len) { in i915_gem_do_execbuffer()
1495 parsed_batch_obj = i915_gem_execbuffer_parse(ring, in i915_gem_do_execbuffer()
1553 ret = i915_gem_request_alloc(ring, ctx, &params->request); in i915_gem_do_execbuffer()
1569 params->ring = ring; in i915_gem_do_execbuffer()
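
The hits at 1116-1121 (i915_reset_gen7_sol_offsets) and 1242-1246 (the INSTPM update in i915_gem_ringbuffer_submission) both follow the driver's standard emission pattern: reserve ring space, write dwords with intel_ring_emit(), then bump the tail with intel_ring_advance(). A minimal sketch of the SOL-offset reset, reconstructed from the matched lines for this kernel generation; treat it as illustrative rather than verbatim (the DRM_DEBUG message on the error path is dropped, and the 4 * 3 dword count reflects the four 3-dword LRI packets emitted below):

	/* Clear the gen7 streamout write offsets before running a batch
	 * that set I915_EXEC_GEN7_SOL_RESET.  Only the gen7 render ring
	 * has the GEN7_SO_WRITE_OFFSET registers. */
	static int
	i915_reset_gen7_sol_offsets(struct drm_device *dev,
				    struct drm_i915_gem_request *req)
	{
		struct intel_engine_cs *ring = req->ring;
		struct drm_i915_private *dev_priv = dev->dev_private;
		int ret, i;

		if (!IS_GEN7(dev) || ring != &dev_priv->ring[RCS])
			return -EINVAL;

		/* Reserve space for four 3-dword MI_LOAD_REGISTER_IMM packets. */
		ret = intel_ring_begin(req, 4 * 3);
		if (ret)
			return ret;

		for (i = 0; i < 4; i++) {
			intel_ring_emit(ring, MI_LOAD_REGISTER_IMM(1));
			intel_ring_emit(ring, GEN7_SO_WRITE_OFFSET(i));
			intel_ring_emit(ring, 0);
		}

		intel_ring_advance(ring);

		return 0;
	}

The INSTPM write at 1242-1246 is the same MI_LOAD_REGISTER_IMM sequence, led by an MI_NOOP so the emitted length stays an even number of dwords; it only runs on the render ring (1236), and the instp_mask in the high half-word acts as the write-enable mask for that masked register.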
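
The hits at 1376-1400 are the engine-selection ladder near the top of i915_gem_do_execbuffer(): the low bits of args->flags (I915_EXEC_RING_MASK) pick the ring, and on parts with two video engines the I915_EXEC_BSD_MASK bits may force VCS or VCS2. A condensed sketch of that logic, reconstructed from the matched lines; the eb_select_ring() wrapper is hypothetical (the driver does this inline), the DRM_DEBUG error messages and up-front flag validation are left out, and an invalid BSD_MASK value is folded into the default case here rather than rejected:

	static struct intel_engine_cs *
	eb_select_ring(struct drm_device *dev, struct drm_file *file,
		       struct drm_i915_gem_execbuffer2 *args)
	{
		struct drm_i915_private *dev_priv = dev->dev_private;
		struct intel_engine_cs *ring;

		if ((args->flags & I915_EXEC_RING_MASK) == I915_EXEC_DEFAULT) {
			ring = &dev_priv->ring[RCS];
		} else if ((args->flags & I915_EXEC_RING_MASK) == I915_EXEC_BSD) {
			if (HAS_BSD2(dev)) {
				/* Two video engines: honour an explicit
				 * RING1/RING2 request, else let the kernel
				 * pick one for this file. */
				switch (args->flags & I915_EXEC_BSD_MASK) {
				case I915_EXEC_BSD_RING1:
					ring = &dev_priv->ring[VCS];
					break;
				case I915_EXEC_BSD_RING2:
					ring = &dev_priv->ring[VCS2];
					break;
				default:
					ring = &dev_priv->ring[
						gen8_dispatch_bsd_ring(dev, file)];
					break;
				}
			} else {
				ring = &dev_priv->ring[VCS];
			}
		} else {
			/* RENDER/BSD/BLT/VEBOX map straight onto RCS/VCS/BCS/VECS. */
			ring = &dev_priv->ring[(args->flags & I915_EXEC_RING_MASK) - 1];
		}

		/* The caller turns an uninitialized ring into -EINVAL (see 1402). */
		return intel_ring_initialized(ring) ? ring : NULL;
	}

When userspace asks for I915_EXEC_BSD without naming a ring, gen8_dispatch_bsd_ring() (hit at 1300) assigns one of the two video rings to the file in ping-pong fashion and caches it in file_priv->bsd_ring, so later submissions on the same fd keep hitting the same engine.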