wa_ctx            398 drivers/gpu/drm/i915/gt/intel_engine_types.h 	struct i915_ctx_workarounds wa_ctx;
wa_ctx           2234 drivers/gpu/drm/i915/gt/intel_lrc.c 	engine->wa_ctx.vma = vma;
wa_ctx           2244 drivers/gpu/drm/i915/gt/intel_lrc.c 	i915_vma_unpin_and_release(&engine->wa_ctx.vma, 0);
wa_ctx           2251 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct i915_ctx_workarounds *wa_ctx = &engine->wa_ctx;
wa_ctx           2252 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct i915_wa_ctx_bb *wa_bb[2] = { &wa_ctx->indirect_ctx,
wa_ctx           2253 drivers/gpu/drm/i915/gt/intel_lrc.c 					    &wa_ctx->per_ctx };
wa_ctx           2290 drivers/gpu/drm/i915/gt/intel_lrc.c 	page = i915_gem_object_get_dirty_page(wa_ctx->vma->obj, 0);
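
The intel_engine_types.h and intel_lrc.c hits above are the engine-side (host) half: each engine embeds an i915_ctx_workarounds backed by a GGTT-pinned buffer, whose vma is cached in engine->wa_ctx.vma, whose first page is CPU-written with the workaround batches, and which is unpinned and released again at teardown. A hedged condensation of that pairing (the function names below are assumptions; vma allocation and error paths are elided):

/* Hedged condensation of the engine wa_ctx buffer lifecycle shown above.
 * setup_wa_ctx()/destroy_wa_ctx() are assumed names; allocation of the
 * vma and all error handling are elided. */
static int setup_wa_ctx(struct intel_engine_cs *engine, struct i915_vma *vma)
{
	struct page *page;

	engine->wa_ctx.vma = vma;	/* cache the pinned buffer */

	/* first page holds the indirect_ctx and per_ctx batches */
	page = i915_gem_object_get_dirty_page(engine->wa_ctx.vma->obj, 0);
	/* ... map the page and emit the workaround batches into it ... */
	(void)page;
	return 0;
}

static void destroy_wa_ctx(struct intel_engine_cs *engine)
{
	/* drops the pin and the reference in one call */
	i915_vma_unpin_and_release(&engine->wa_ctx.vma, 0);
}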
wa_ctx           3230 drivers/gpu/drm/i915/gt/intel_lrc.c 		struct i915_ctx_workarounds *wa_ctx = &engine->wa_ctx;
wa_ctx           3235 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (wa_ctx->indirect_ctx.size) {
wa_ctx           3236 drivers/gpu/drm/i915/gt/intel_lrc.c 			u32 ggtt_offset = i915_ggtt_offset(wa_ctx->vma);
wa_ctx           3239 drivers/gpu/drm/i915/gt/intel_lrc.c 				(ggtt_offset + wa_ctx->indirect_ctx.offset) |
wa_ctx           3240 drivers/gpu/drm/i915/gt/intel_lrc.c 				(wa_ctx->indirect_ctx.size / CACHELINE_BYTES);
wa_ctx           3247 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (wa_ctx->per_ctx.size) {
wa_ctx           3248 drivers/gpu/drm/i915/gt/intel_lrc.c 			u32 ggtt_offset = i915_ggtt_offset(wa_ctx->vma);
wa_ctx           3251 drivers/gpu/drm/i915/gt/intel_lrc.c 				(ggtt_offset + wa_ctx->per_ctx.offset) | 0x01;
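
The two register writes above pack an address and a second field into a single dword, which works because the batches are cacheline-aligned, leaving the low six bits of the GGTT address free: INDIRECT_CTX carries the batch size in cachelines, PER_CTX carries a valid bit. A standalone sketch of the packing, with made-up offsets (CACHELINE_BYTES is 64 in i915):

/* Standalone sketch of how the INDIRECT_CTX and PER_CTX register payloads
 * above are packed. The offsets and sizes are hypothetical. */
#include <stdint.h>
#include <stdio.h>

#define CACHELINE_BYTES 64u

int main(void)
{
	uint32_t ggtt_offset = 0x00100000;	/* hypothetical GGTT base of wa_ctx->vma */
	uint32_t indirect_off = 0, indirect_size = 256;	/* cacheline-aligned */
	uint32_t per_ctx_off = 256;

	/* low bits of the cacheline-aligned address encode the batch size
	 * in cachelines (INDIRECT_CTX) or the valid bit (PER_CTX) */
	uint32_t indirect_ctx_reg = (ggtt_offset + indirect_off) |
				    (indirect_size / CACHELINE_BYTES);
	uint32_t per_ctx_reg = (ggtt_offset + per_ctx_off) | 0x01;

	printf("INDIRECT_CTX = %#x\nPER_CTX      = %#x\n",
	       indirect_ctx_reg, per_ctx_reg);
	return 0;
}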
wa_ctx           2806 drivers/gpu/drm/i915/gvt/cmd_parser.c static int scan_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx           2812 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu_workload *workload = container_of(wa_ctx,
wa_ctx           2814 drivers/gpu/drm/i915/gvt/cmd_parser.c 				wa_ctx);
wa_ctx           2817 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (WARN_ON(!IS_ALIGNED(wa_ctx->indirect_ctx.guest_gma,
wa_ctx           2821 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ring_tail = wa_ctx->indirect_ctx.size + 3 * sizeof(u32);
wa_ctx           2822 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ring_size = round_up(wa_ctx->indirect_ctx.size + CACHELINE_BYTES,
wa_ctx           2824 drivers/gpu/drm/i915/gvt/cmd_parser.c 	gma_head = wa_ctx->indirect_ctx.guest_gma;
wa_ctx           2825 drivers/gpu/drm/i915/gvt/cmd_parser.c 	gma_tail = wa_ctx->indirect_ctx.guest_gma + ring_tail;
wa_ctx           2826 drivers/gpu/drm/i915/gvt/cmd_parser.c 	gma_bottom = wa_ctx->indirect_ctx.guest_gma + ring_size;
wa_ctx           2832 drivers/gpu/drm/i915/gvt/cmd_parser.c 	s.ring_start = wa_ctx->indirect_ctx.guest_gma;
wa_ctx           2836 drivers/gpu/drm/i915/gvt/cmd_parser.c 	s.rb_va = wa_ctx->indirect_ctx.shadow_va;
wa_ctx           2845 drivers/gpu/drm/i915/gvt/cmd_parser.c 		wa_ctx->indirect_ctx.guest_gma, ring_size);
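
scan_wa_ctx() above reuses the ring-buffer command scanner for the indirect-ctx batch by synthesizing ring bounds around the guest's buffer: head at the batch start, tail three dwords past its end (room for the batch-buffer-start that combine_wa_ctx() splices on, below), and bottom at the rounded-up allocation size. The round-up alignment argument is off-screen in the listing; the standalone model below assumes 4 KiB:

/* Standalone model of the scan bounds computed above. The alignment
 * (4 KiB) and the example size/address are assumptions. */
#include <stdint.h>
#include <stdio.h>

#define CACHELINE_BYTES 64u
#define ROUND_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	uint32_t size = 640;			/* hypothetical indirect_ctx.size */
	uint64_t guest_gma = 0x00200000;	/* hypothetical guest graphics address */

	uint32_t ring_tail = size + 3 * sizeof(uint32_t);
	uint32_t ring_size = ROUND_UP(size + CACHELINE_BYTES, 4096u);

	printf("head=%#llx tail=%#llx bottom=%#llx\n",
	       (unsigned long long)guest_gma,
	       (unsigned long long)(guest_gma + ring_tail),
	       (unsigned long long)(guest_gma + ring_size));
	return 0;
}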
wa_ctx           2929 drivers/gpu/drm/i915/gvt/cmd_parser.c static int shadow_indirect_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx           2931 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int ctx_size = wa_ctx->indirect_ctx.size;
wa_ctx           2932 drivers/gpu/drm/i915/gvt/cmd_parser.c 	unsigned long guest_gma = wa_ctx->indirect_ctx.guest_gma;
wa_ctx           2933 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu_workload *workload = container_of(wa_ctx,
wa_ctx           2935 drivers/gpu/drm/i915/gvt/cmd_parser.c 					wa_ctx);
wa_ctx           2972 drivers/gpu/drm/i915/gvt/cmd_parser.c 	wa_ctx->indirect_ctx.obj = obj;
wa_ctx           2973 drivers/gpu/drm/i915/gvt/cmd_parser.c 	wa_ctx->indirect_ctx.shadow_va = map;
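
shadow_indirect_ctx() above copies the guest's indirect-ctx batch out of guest memory into a driver-owned shadow object before anything trusts or executes it. The allocation and guest-copy calls sit on lines the listing does not show, so the helpers below (alloc_shadow_object, copy_from_guest) are assumed stand-ins; i915_gem_object_pin_map() is the real mapping call. A hedged condensation:

/* Hedged condensation of the shadowing step; alloc_shadow_object() and
 * copy_from_guest() are assumed stand-ins for off-screen calls, and
 * error handling is elided. */
static int shadow_indirect_ctx(struct intel_shadow_wa_ctx *wa_ctx)
{
	int ctx_size = wa_ctx->indirect_ctx.size;
	unsigned long guest_gma = wa_ctx->indirect_ctx.guest_gma;
	struct drm_i915_gem_object *obj;
	void *map;

	/* shadow object: guest batch plus one cacheline of slack, which
	 * combine_wa_ctx() below fills with a chained BB start */
	obj = alloc_shadow_object(ctx_size + CACHELINE_BYTES);
	map = i915_gem_object_pin_map(obj, I915_MAP_WB);
	copy_from_guest(map, guest_gma, ctx_size);

	wa_ctx->indirect_ctx.obj = obj;	/* paired with release_shadow_wa_ctx() */
	wa_ctx->indirect_ctx.shadow_va = map;
	return 0;
}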
wa_ctx           2983 drivers/gpu/drm/i915/gvt/cmd_parser.c static int combine_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx           2988 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (!wa_ctx->per_ctx.valid)
wa_ctx           2992 drivers/gpu/drm/i915/gvt/cmd_parser.c 	per_ctx_start[1] = wa_ctx->per_ctx.guest_gma;
wa_ctx           2994 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb_start_sva = (unsigned char *)wa_ctx->indirect_ctx.shadow_va +
wa_ctx           2995 drivers/gpu/drm/i915/gvt/cmd_parser.c 				wa_ctx->indirect_ctx.size;
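
combine_wa_ctx() above only runs when the guest marked its per-ctx batch valid; it writes a small batch-buffer-start just past the shadowed indirect-ctx image, so the scanner (and later the hardware) sees the two batches as one chain. A standalone model (the opcode dword and sizes are illustrative, not the driver's exact values):

/* Standalone model of splicing the per-ctx chain onto the shadow image. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define CACHELINE_BYTES 64u

int main(void)
{
	uint8_t shadow_va[4096];
	uint32_t indirect_size = 640;		/* hypothetical */
	uint64_t per_ctx_gma = 0x00300000;	/* hypothetical guest address */
	uint32_t bb_start[CACHELINE_BYTES / 4] = {0};

	bb_start[0] = 0x18800001;	/* MI_BATCH_BUFFER_START encoding (assumed) */
	bb_start[1] = (uint32_t)per_ctx_gma;

	/* splice the chain right after the shadowed indirect-ctx batch */
	memcpy(shadow_va + indirect_size, bb_start, sizeof(bb_start));
	printf("chained per-ctx BB at shadow offset %u\n", indirect_size);
	return 0;
}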
wa_ctx           3002 drivers/gpu/drm/i915/gvt/cmd_parser.c int intel_gvt_scan_and_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx           3005 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu_workload *workload = container_of(wa_ctx,
wa_ctx           3007 drivers/gpu/drm/i915/gvt/cmd_parser.c 					wa_ctx);
wa_ctx           3010 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (wa_ctx->indirect_ctx.size == 0)
wa_ctx           3013 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = shadow_indirect_ctx(wa_ctx);
wa_ctx           3019 drivers/gpu/drm/i915/gvt/cmd_parser.c 	combine_wa_ctx(wa_ctx);
wa_ctx           3021 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = scan_wa_ctx(wa_ctx);
wa_ctx             47 drivers/gpu/drm/i915/gvt/cmd_parser.h int intel_gvt_scan_and_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx);
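
Put together, intel_gvt_scan_and_shadow_wa_ctx() above is a three-step pipeline: copy the guest batch into a shadow, splice the per-ctx chain onto its tail, then command-scan the combined result. A condensed sketch of the ordering it shows (per-step error handling elided):

/* Condensed sketch of the top-level ordering visible above. */
int intel_gvt_scan_and_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
{
	int ret;

	if (wa_ctx->indirect_ctx.size == 0)	/* nothing to shadow */
		return 0;

	ret = shadow_indirect_ctx(wa_ctx);
	if (ret)
		return ret;

	combine_wa_ctx(wa_ctx);

	return scan_wa_ctx(wa_ctx);
}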
wa_ctx            352 drivers/gpu/drm/i915/gvt/scheduler.c static void release_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx            354 drivers/gpu/drm/i915/gvt/scheduler.c 	if (!wa_ctx->indirect_ctx.obj)
wa_ctx            357 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_gem_object_unpin_map(wa_ctx->indirect_ctx.obj);
wa_ctx            358 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_gem_object_put(wa_ctx->indirect_ctx.obj);
wa_ctx            360 drivers/gpu/drm/i915/gvt/scheduler.c 	wa_ctx->indirect_ctx.obj = NULL;
wa_ctx            361 drivers/gpu/drm/i915/gvt/scheduler.c 	wa_ctx->indirect_ctx.shadow_va = NULL;
wa_ctx            438 drivers/gpu/drm/i915/gvt/scheduler.c 	if (workload->ring_id == RCS0 && workload->wa_ctx.indirect_ctx.size) {
wa_ctx            439 drivers/gpu/drm/i915/gvt/scheduler.c 		ret = intel_gvt_scan_and_shadow_wa_ctx(&workload->wa_ctx);
wa_ctx            447 drivers/gpu/drm/i915/gvt/scheduler.c 	release_shadow_wa_ctx(&workload->wa_ctx);
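
release_shadow_wa_ctx() above is invoked from the error and completion paths that follow (and again further down); because it NULLs indirect_ctx.obj and shadow_va after the unpin and put, those repeated calls are harmless. A standalone model of the idempotent-release pattern, with the two i915 calls stubbed:

/* Standalone model of the release pattern above: unmap, drop the
 * reference, and NULL the cached pointers so a second release is a
 * no-op. Types and the two i915 calls are stubbed for illustration. */
#include <stdio.h>
#include <stdlib.h>

struct shadow_bb { void *obj, *shadow_va; };

static void unpin_map(void *obj) { printf("unpin map of %p\n", obj); }
static void put_obj(void *obj)   { printf("put %p\n", obj); free(obj); }

static void release_shadow(struct shadow_bb *bb)
{
	if (!bb->obj)		/* already released, or never shadowed */
		return;
	unpin_map(bb->obj);
	put_obj(bb->obj);
	bb->obj = NULL;		/* make repeated release harmless */
	bb->shadow_va = NULL;
}

int main(void)
{
	struct shadow_bb bb = { .obj = malloc(16), .shadow_va = (void *)1 };
	release_shadow(&bb);
	release_shadow(&bb);	/* second call is a no-op */
	return 0;
}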
wa_ctx            530 drivers/gpu/drm/i915/gvt/scheduler.c static void update_wa_ctx_2_shadow_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx            533 drivers/gpu/drm/i915/gvt/scheduler.c 		container_of(wa_ctx, struct intel_vgpu_workload, wa_ctx);
wa_ctx            540 drivers/gpu/drm/i915/gvt/scheduler.c 		(~PER_CTX_ADDR_MASK)) | wa_ctx->per_ctx.shadow_gma;
wa_ctx            543 drivers/gpu/drm/i915/gvt/scheduler.c 		(~INDIRECT_CTX_ADDR_MASK)) | wa_ctx->indirect_ctx.shadow_gma;
wa_ctx            546 drivers/gpu/drm/i915/gvt/scheduler.c static int prepare_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
wa_ctx            550 drivers/gpu/drm/i915/gvt/scheduler.c 		(unsigned char *)wa_ctx->indirect_ctx.shadow_va +
wa_ctx            551 drivers/gpu/drm/i915/gvt/scheduler.c 		wa_ctx->indirect_ctx.size;
wa_ctx            553 drivers/gpu/drm/i915/gvt/scheduler.c 	if (wa_ctx->indirect_ctx.size == 0)
wa_ctx            556 drivers/gpu/drm/i915/gvt/scheduler.c 	vma = i915_gem_object_ggtt_pin(wa_ctx->indirect_ctx.obj, NULL,
wa_ctx            566 drivers/gpu/drm/i915/gvt/scheduler.c 	wa_ctx->indirect_ctx.shadow_gma = i915_ggtt_offset(vma);
wa_ctx            568 drivers/gpu/drm/i915/gvt/scheduler.c 	wa_ctx->per_ctx.shadow_gma = *((unsigned int *)per_ctx_va + 1);
wa_ctx            571 drivers/gpu/drm/i915/gvt/scheduler.c 	update_wa_ctx_2_shadow_ctx(wa_ctx);
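
prepare_shadow_wa_ctx() above pins the shadow object into the global GTT, records its offset as indirect_ctx.shadow_gma, re-reads the per-ctx address from the dword after the spliced batch-buffer-start, and then update_wa_ctx_2_shadow_ctx() patches both values into the shadow context image through the ADDR masks. A hedged condensation (the ggtt-pin arguments beyond the object are assumptions; error handling elided):

/* Hedged condensation of the pinning step above. */
static int prepare_shadow_wa_ctx(struct intel_shadow_wa_ctx *wa_ctx)
{
	unsigned char *per_ctx_va =
		(unsigned char *)wa_ctx->indirect_ctx.shadow_va +
		wa_ctx->indirect_ctx.size;
	struct i915_vma *vma;

	if (wa_ctx->indirect_ctx.size == 0)
		return 0;

	vma = i915_gem_object_ggtt_pin(wa_ctx->indirect_ctx.obj, NULL,
				       0, CACHELINE_BYTES, 0);	/* args assumed */
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	wa_ctx->indirect_ctx.shadow_gma = i915_ggtt_offset(vma);
	/* address dword of the spliced BB start, after scanning */
	wa_ctx->per_ctx.shadow_gma = *((unsigned int *)per_ctx_va + 1);

	update_wa_ctx_2_shadow_ctx(wa_ctx);	/* patch the shadow ctx image */
	return 0;
}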
wa_ctx            667 drivers/gpu/drm/i915/gvt/scheduler.c 	ret = prepare_shadow_wa_ctx(&workload->wa_ctx);
wa_ctx            681 drivers/gpu/drm/i915/gvt/scheduler.c 	release_shadow_wa_ctx(&workload->wa_ctx);
wa_ctx            713 drivers/gpu/drm/i915/gvt/scheduler.c 		release_shadow_wa_ctx(&workload->wa_ctx);
wa_ctx           1383 drivers/gpu/drm/i915/gvt/scheduler.c 	release_shadow_wa_ctx(&workload->wa_ctx);
wa_ctx           1558 drivers/gpu/drm/i915/gvt/scheduler.c 		workload->wa_ctx.indirect_ctx.guest_gma =
wa_ctx           1560 drivers/gpu/drm/i915/gvt/scheduler.c 		workload->wa_ctx.indirect_ctx.size =
wa_ctx           1564 drivers/gpu/drm/i915/gvt/scheduler.c 		if (workload->wa_ctx.indirect_ctx.size != 0) {
wa_ctx           1566 drivers/gpu/drm/i915/gvt/scheduler.c 				workload->wa_ctx.indirect_ctx.guest_gma,
wa_ctx           1567 drivers/gpu/drm/i915/gvt/scheduler.c 				workload->wa_ctx.indirect_ctx.size)) {
wa_ctx           1569 drivers/gpu/drm/i915/gvt/scheduler.c 				    workload->wa_ctx.indirect_ctx.guest_gma);
wa_ctx           1575 drivers/gpu/drm/i915/gvt/scheduler.c 		workload->wa_ctx.per_ctx.guest_gma =
wa_ctx           1577 drivers/gpu/drm/i915/gvt/scheduler.c 		workload->wa_ctx.per_ctx.valid = per_ctx & 1;
wa_ctx           1578 drivers/gpu/drm/i915/gvt/scheduler.c 		if (workload->wa_ctx.per_ctx.valid) {
wa_ctx           1580 drivers/gpu/drm/i915/gvt/scheduler.c 				workload->wa_ctx.per_ctx.guest_gma,
wa_ctx           1583 drivers/gpu/drm/i915/gvt/scheduler.c 					workload->wa_ctx.per_ctx.guest_gma);
wa_ctx            113 drivers/gpu/drm/i915/gvt/scheduler.h 	struct intel_shadow_wa_ctx wa_ctx;
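
The scheduler.c lines above recover what the guest programmed: the INDIRECT_CTX dword is split into a graphics address (high bits) and a size in cachelines (low bits), mirroring the packing seen in intel_lrc.c earlier, and bit 0 of the PER_CTX dword is the valid flag that combine_wa_ctx() tests. A standalone model of the unpacking (mask values are illustrative):

/* Standalone model of the guest-value unpacking visible above. */
#include <stdint.h>
#include <stdio.h>

#define CACHELINE_BYTES 64u
#define ADDR_MASK (~(uint32_t)(CACHELINE_BYTES - 1))

int main(void)
{
	uint32_t indirect_ctx = 0x00200004;	/* hypothetical guest-written value */
	uint32_t per_ctx = 0x00300001;

	uint32_t guest_gma = indirect_ctx & ADDR_MASK;
	uint32_t size = (indirect_ctx & (CACHELINE_BYTES - 1)) * CACHELINE_BYTES;
	int per_ctx_valid = per_ctx & 1;

	printf("indirect: gma=%#x size=%u bytes; per_ctx: gma=%#x valid=%d\n",
	       guest_gma, size, per_ctx & ADDR_MASK, per_ctx_valid);
	return 0;
}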
wa_ctx            772 drivers/gpu/drm/i915/i915_gpu_error.c 		print_error_obj(m, ee->engine, "WA context", ee->wa_ctx);
wa_ctx            940 drivers/gpu/drm/i915/i915_gpu_error.c 		i915_error_object_free(ee->wa_ctx);
wa_ctx           1446 drivers/gpu/drm/i915/i915_gpu_error.c 		ee->wa_ctx =
wa_ctx           1448 drivers/gpu/drm/i915/i915_gpu_error.c 						 engine->wa_ctx.vma,
wa_ctx            139 drivers/gpu/drm/i915/i915_gpu_error.h 		struct drm_i915_error_object *wa_ctx;