vgpu               41 drivers/gpu/drm/i915/gvt/aperture_gm.c static int alloc_gm(struct intel_vgpu *vgpu, bool high_gm)
vgpu               43 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu               51 drivers/gpu/drm/i915/gvt/aperture_gm.c 		node = &vgpu->gm.high_gm_node;
vgpu               52 drivers/gpu/drm/i915/gvt/aperture_gm.c 		size = vgpu_hidden_sz(vgpu);
vgpu               57 drivers/gpu/drm/i915/gvt/aperture_gm.c 		node = &vgpu->gm.low_gm_node;
vgpu               58 drivers/gpu/drm/i915/gvt/aperture_gm.c 		size = vgpu_aperture_sz(vgpu);
vgpu               79 drivers/gpu/drm/i915/gvt/aperture_gm.c static int alloc_vgpu_gm(struct intel_vgpu *vgpu)
vgpu               81 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu               85 drivers/gpu/drm/i915/gvt/aperture_gm.c 	ret = alloc_gm(vgpu, false);
vgpu               89 drivers/gpu/drm/i915/gvt/aperture_gm.c 	ret = alloc_gm(vgpu, true);
vgpu               93 drivers/gpu/drm/i915/gvt/aperture_gm.c 	gvt_dbg_core("vgpu%d: alloc low GM start %llx size %llx\n", vgpu->id,
vgpu               94 drivers/gpu/drm/i915/gvt/aperture_gm.c 		     vgpu_aperture_offset(vgpu), vgpu_aperture_sz(vgpu));
vgpu               96 drivers/gpu/drm/i915/gvt/aperture_gm.c 	gvt_dbg_core("vgpu%d: alloc high GM start %llx size %llx\n", vgpu->id,
vgpu               97 drivers/gpu/drm/i915/gvt/aperture_gm.c 		     vgpu_hidden_offset(vgpu), vgpu_hidden_sz(vgpu));
vgpu              102 drivers/gpu/drm/i915/gvt/aperture_gm.c 	drm_mm_remove_node(&vgpu->gm.low_gm_node);
vgpu              107 drivers/gpu/drm/i915/gvt/aperture_gm.c static void free_vgpu_gm(struct intel_vgpu *vgpu)
vgpu              109 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              112 drivers/gpu/drm/i915/gvt/aperture_gm.c 	drm_mm_remove_node(&vgpu->gm.low_gm_node);
vgpu              113 drivers/gpu/drm/i915/gvt/aperture_gm.c 	drm_mm_remove_node(&vgpu->gm.high_gm_node);
vgpu              127 drivers/gpu/drm/i915/gvt/aperture_gm.c void intel_vgpu_write_fence(struct intel_vgpu *vgpu,
vgpu              130 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              137 drivers/gpu/drm/i915/gvt/aperture_gm.c 	if (WARN_ON(fence >= vgpu_fence_sz(vgpu)))
vgpu              140 drivers/gpu/drm/i915/gvt/aperture_gm.c 	reg = vgpu->fence.regs[fence];
vgpu              155 drivers/gpu/drm/i915/gvt/aperture_gm.c static void _clear_vgpu_fence(struct intel_vgpu *vgpu)
vgpu              159 drivers/gpu/drm/i915/gvt/aperture_gm.c 	for (i = 0; i < vgpu_fence_sz(vgpu); i++)
vgpu              160 drivers/gpu/drm/i915/gvt/aperture_gm.c 		intel_vgpu_write_fence(vgpu, i, 0);
vgpu              163 drivers/gpu/drm/i915/gvt/aperture_gm.c static void free_vgpu_fence(struct intel_vgpu *vgpu)
vgpu              165 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              170 drivers/gpu/drm/i915/gvt/aperture_gm.c 	if (WARN_ON(!vgpu_fence_sz(vgpu)))
vgpu              176 drivers/gpu/drm/i915/gvt/aperture_gm.c 	_clear_vgpu_fence(vgpu);
vgpu              177 drivers/gpu/drm/i915/gvt/aperture_gm.c 	for (i = 0; i < vgpu_fence_sz(vgpu); i++) {
vgpu              178 drivers/gpu/drm/i915/gvt/aperture_gm.c 		reg = vgpu->fence.regs[i];
vgpu              180 drivers/gpu/drm/i915/gvt/aperture_gm.c 		vgpu->fence.regs[i] = NULL;
vgpu              187 drivers/gpu/drm/i915/gvt/aperture_gm.c static int alloc_vgpu_fence(struct intel_vgpu *vgpu)
vgpu              189 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              200 drivers/gpu/drm/i915/gvt/aperture_gm.c 	for (i = 0; i < vgpu_fence_sz(vgpu); i++) {
vgpu              205 drivers/gpu/drm/i915/gvt/aperture_gm.c 		vgpu->fence.regs[i] = reg;
vgpu              208 drivers/gpu/drm/i915/gvt/aperture_gm.c 	_clear_vgpu_fence(vgpu);
vgpu              216 drivers/gpu/drm/i915/gvt/aperture_gm.c 	for (i = 0; i < vgpu_fence_sz(vgpu); i++) {
vgpu              217 drivers/gpu/drm/i915/gvt/aperture_gm.c 		reg = vgpu->fence.regs[i];
vgpu              221 drivers/gpu/drm/i915/gvt/aperture_gm.c 		vgpu->fence.regs[i] = NULL;
vgpu              228 drivers/gpu/drm/i915/gvt/aperture_gm.c static void free_resource(struct intel_vgpu *vgpu)
vgpu              230 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              232 drivers/gpu/drm/i915/gvt/aperture_gm.c 	gvt->gm.vgpu_allocated_low_gm_size -= vgpu_aperture_sz(vgpu);
vgpu              233 drivers/gpu/drm/i915/gvt/aperture_gm.c 	gvt->gm.vgpu_allocated_high_gm_size -= vgpu_hidden_sz(vgpu);
vgpu              234 drivers/gpu/drm/i915/gvt/aperture_gm.c 	gvt->fence.vgpu_allocated_fence_num -= vgpu_fence_sz(vgpu);
vgpu              237 drivers/gpu/drm/i915/gvt/aperture_gm.c static int alloc_resource(struct intel_vgpu *vgpu,
vgpu              240 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              258 drivers/gpu/drm/i915/gvt/aperture_gm.c 	vgpu_aperture_sz(vgpu) = ALIGN(request, I915_GTT_PAGE_SIZE);
vgpu              269 drivers/gpu/drm/i915/gvt/aperture_gm.c 	vgpu_hidden_sz(vgpu) = ALIGN(request, I915_GTT_PAGE_SIZE);
vgpu              280 drivers/gpu/drm/i915/gvt/aperture_gm.c 	vgpu_fence_sz(vgpu) = request;
vgpu              302 drivers/gpu/drm/i915/gvt/aperture_gm.c void intel_vgpu_free_resource(struct intel_vgpu *vgpu)
vgpu              304 drivers/gpu/drm/i915/gvt/aperture_gm.c 	free_vgpu_gm(vgpu);
vgpu              305 drivers/gpu/drm/i915/gvt/aperture_gm.c 	free_vgpu_fence(vgpu);
vgpu              306 drivers/gpu/drm/i915/gvt/aperture_gm.c 	free_resource(vgpu);
vgpu              316 drivers/gpu/drm/i915/gvt/aperture_gm.c void intel_vgpu_reset_resource(struct intel_vgpu *vgpu)
vgpu              318 drivers/gpu/drm/i915/gvt/aperture_gm.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              321 drivers/gpu/drm/i915/gvt/aperture_gm.c 	_clear_vgpu_fence(vgpu);
vgpu              337 drivers/gpu/drm/i915/gvt/aperture_gm.c int intel_vgpu_alloc_resource(struct intel_vgpu *vgpu,
vgpu              342 drivers/gpu/drm/i915/gvt/aperture_gm.c 	ret = alloc_resource(vgpu, param);
vgpu              346 drivers/gpu/drm/i915/gvt/aperture_gm.c 	ret = alloc_vgpu_gm(vgpu);
vgpu              350 drivers/gpu/drm/i915/gvt/aperture_gm.c 	ret = alloc_vgpu_fence(vgpu);
vgpu              357 drivers/gpu/drm/i915/gvt/aperture_gm.c 	free_vgpu_gm(vgpu);
vgpu              359 drivers/gpu/drm/i915/gvt/aperture_gm.c 	free_resource(vgpu);
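
The aperture_gm.c hits above cover how a vGPU's graphics memory (low/aperture vs. high/hidden GM, carved out with drm_mm nodes) and its fence registers are allocated, cleared and released, plus the size accounting in alloc_resource()/free_resource(). Below is a minimal user-space sketch of just the low/high split and page alignment those call sites suggest; every fake_* name, the pool bases and the ALIGN_UP macro are invented for illustration and are not the driver's API.

#include <stdio.h>
#include <stdint.h>

#define FAKE_GTT_PAGE_SIZE 4096u
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

struct fake_vgpu_gm {
	uint64_t low_base, low_size;   /* CPU-visible aperture ("low GM") */
	uint64_t high_base, high_size; /* hidden ("high") GM */
};

/* Pretend allocator: align both requests and place them in two pools. */
static int fake_alloc_gm(struct fake_vgpu_gm *gm,
			 uint64_t low_req, uint64_t high_req)
{
	gm->low_size  = ALIGN_UP(low_req,  FAKE_GTT_PAGE_SIZE);
	gm->high_size = ALIGN_UP(high_req, FAKE_GTT_PAGE_SIZE);
	gm->low_base  = 0x0;          /* a real allocator would place these */
	gm->high_base = 0x40000000;   /* pretend the hidden range starts here */
	return 0;
}

int main(void)
{
	struct fake_vgpu_gm gm;

	if (fake_alloc_gm(&gm, 64u << 20, 384u << 20) == 0)
		printf("low %#llx+%#llx, high %#llx+%#llx\n",
		       (unsigned long long)gm.low_base,
		       (unsigned long long)gm.low_size,
		       (unsigned long long)gm.high_base,
		       (unsigned long long)gm.high_size);
	return 0;
}

The listing also shows what appears to be the unwind path removing the low-GM node again when the second (high-GM) allocation fails; the sketch only mirrors the alignment and the low/high bookkeeping, not that error handling.
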
vgpu               68 drivers/gpu/drm/i915/gvt/cfg_space.c static void vgpu_pci_cfg_mem_write(struct intel_vgpu *vgpu, unsigned int off,
vgpu               71 drivers/gpu/drm/i915/gvt/cfg_space.c 	u8 *cfg_base = vgpu_cfg_space(vgpu);
vgpu              106 drivers/gpu/drm/i915/gvt/cfg_space.c int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              112 drivers/gpu/drm/i915/gvt/cfg_space.c 	if (WARN_ON(offset + bytes > vgpu->gvt->device_info.cfg_space_size))
vgpu              115 drivers/gpu/drm/i915/gvt/cfg_space.c 	memcpy(p_data, vgpu_cfg_space(vgpu) + offset, bytes);
vgpu              119 drivers/gpu/drm/i915/gvt/cfg_space.c static int map_aperture(struct intel_vgpu *vgpu, bool map)
vgpu              121 drivers/gpu/drm/i915/gvt/cfg_space.c 	phys_addr_t aperture_pa = vgpu_aperture_pa_base(vgpu);
vgpu              122 drivers/gpu/drm/i915/gvt/cfg_space.c 	unsigned long aperture_sz = vgpu_aperture_sz(vgpu);
vgpu              127 drivers/gpu/drm/i915/gvt/cfg_space.c 	if (map == vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].tracked)
vgpu              130 drivers/gpu/drm/i915/gvt/cfg_space.c 	val = vgpu_cfg_space(vgpu)[PCI_BASE_ADDRESS_2];
vgpu              132 drivers/gpu/drm/i915/gvt/cfg_space.c 		val = *(u64 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_2);
vgpu              134 drivers/gpu/drm/i915/gvt/cfg_space.c 		val = *(u32 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_2);
vgpu              136 drivers/gpu/drm/i915/gvt/cfg_space.c 	first_gfn = (val + vgpu_aperture_offset(vgpu)) >> PAGE_SHIFT;
vgpu              138 drivers/gpu/drm/i915/gvt/cfg_space.c 	ret = intel_gvt_hypervisor_map_gfn_to_mfn(vgpu, first_gfn,
vgpu              145 drivers/gpu/drm/i915/gvt/cfg_space.c 	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].tracked = map;
vgpu              149 drivers/gpu/drm/i915/gvt/cfg_space.c static int trap_gttmmio(struct intel_vgpu *vgpu, bool trap)
vgpu              155 drivers/gpu/drm/i915/gvt/cfg_space.c 	if (trap == vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].tracked)
vgpu              158 drivers/gpu/drm/i915/gvt/cfg_space.c 	val = vgpu_cfg_space(vgpu)[PCI_BASE_ADDRESS_0];
vgpu              160 drivers/gpu/drm/i915/gvt/cfg_space.c 		start = *(u64 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_0);
vgpu              162 drivers/gpu/drm/i915/gvt/cfg_space.c 		start = *(u32 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_0);
vgpu              165 drivers/gpu/drm/i915/gvt/cfg_space.c 	end = start + vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].size - 1;
vgpu              167 drivers/gpu/drm/i915/gvt/cfg_space.c 	ret = intel_gvt_hypervisor_set_trap_area(vgpu, start, end, trap);
vgpu              171 drivers/gpu/drm/i915/gvt/cfg_space.c 	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].tracked = trap;
vgpu              175 drivers/gpu/drm/i915/gvt/cfg_space.c static int emulate_pci_command_write(struct intel_vgpu *vgpu,
vgpu              178 drivers/gpu/drm/i915/gvt/cfg_space.c 	u8 old = vgpu_cfg_space(vgpu)[offset];
vgpu              183 drivers/gpu/drm/i915/gvt/cfg_space.c 	vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
vgpu              188 drivers/gpu/drm/i915/gvt/cfg_space.c 		ret = trap_gttmmio(vgpu, false);
vgpu              191 drivers/gpu/drm/i915/gvt/cfg_space.c 		ret = map_aperture(vgpu, false);
vgpu              195 drivers/gpu/drm/i915/gvt/cfg_space.c 		ret = trap_gttmmio(vgpu, true);
vgpu              198 drivers/gpu/drm/i915/gvt/cfg_space.c 		ret = map_aperture(vgpu, true);
vgpu              206 drivers/gpu/drm/i915/gvt/cfg_space.c static int emulate_pci_rom_bar_write(struct intel_vgpu *vgpu,
vgpu              209 drivers/gpu/drm/i915/gvt/cfg_space.c 	u32 *pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
vgpu              216 drivers/gpu/drm/i915/gvt/cfg_space.c 		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
vgpu              220 drivers/gpu/drm/i915/gvt/cfg_space.c static int emulate_pci_bar_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              228 drivers/gpu/drm/i915/gvt/cfg_space.c 		vgpu_cfg_space(vgpu)[PCI_COMMAND] & PCI_COMMAND_MEMORY;
vgpu              229 drivers/gpu/drm/i915/gvt/cfg_space.c 	struct intel_vgpu_pci_bar *bars = vgpu->cfg_space.bar;
vgpu              243 drivers/gpu/drm/i915/gvt/cfg_space.c 			intel_vgpu_write_pci_bar(vgpu, offset,
vgpu              249 drivers/gpu/drm/i915/gvt/cfg_space.c 			ret = trap_gttmmio(vgpu, false);
vgpu              254 drivers/gpu/drm/i915/gvt/cfg_space.c 			intel_vgpu_write_pci_bar(vgpu, offset,
vgpu              256 drivers/gpu/drm/i915/gvt/cfg_space.c 			ret = map_aperture(vgpu, false);
vgpu              260 drivers/gpu/drm/i915/gvt/cfg_space.c 			intel_vgpu_write_pci_bar(vgpu, offset, 0x0, false);
vgpu              270 drivers/gpu/drm/i915/gvt/cfg_space.c 			trap_gttmmio(vgpu, false);
vgpu              271 drivers/gpu/drm/i915/gvt/cfg_space.c 			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
vgpu              272 drivers/gpu/drm/i915/gvt/cfg_space.c 			ret = trap_gttmmio(vgpu, mmio_enabled);
vgpu              276 drivers/gpu/drm/i915/gvt/cfg_space.c 			map_aperture(vgpu, false);
vgpu              277 drivers/gpu/drm/i915/gvt/cfg_space.c 			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
vgpu              278 drivers/gpu/drm/i915/gvt/cfg_space.c 			ret = map_aperture(vgpu, mmio_enabled);
vgpu              281 drivers/gpu/drm/i915/gvt/cfg_space.c 			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
vgpu              297 drivers/gpu/drm/i915/gvt/cfg_space.c int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              305 drivers/gpu/drm/i915/gvt/cfg_space.c 	if (WARN_ON(offset + bytes > vgpu->gvt->device_info.cfg_space_size))
vgpu              312 drivers/gpu/drm/i915/gvt/cfg_space.c 		return emulate_pci_command_write(vgpu, offset, p_data, bytes);
vgpu              319 drivers/gpu/drm/i915/gvt/cfg_space.c 		return emulate_pci_rom_bar_write(vgpu, offset, p_data, bytes);
vgpu              324 drivers/gpu/drm/i915/gvt/cfg_space.c 		return emulate_pci_bar_write(vgpu, offset, p_data, bytes);
vgpu              329 drivers/gpu/drm/i915/gvt/cfg_space.c 		ret = intel_vgpu_emulate_opregion_request(vgpu, *(u32 *)p_data);
vgpu              337 drivers/gpu/drm/i915/gvt/cfg_space.c 		ret = intel_vgpu_opregion_base_write_handler(vgpu,
vgpu              342 drivers/gpu/drm/i915/gvt/cfg_space.c 		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
vgpu              345 drivers/gpu/drm/i915/gvt/cfg_space.c 		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
vgpu              358 drivers/gpu/drm/i915/gvt/cfg_space.c void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
vgpu              361 drivers/gpu/drm/i915/gvt/cfg_space.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              365 drivers/gpu/drm/i915/gvt/cfg_space.c 	memcpy(vgpu_cfg_space(vgpu), gvt->firmware.cfg_space,
vgpu              369 drivers/gpu/drm/i915/gvt/cfg_space.c 		vgpu_cfg_space(vgpu)[PCI_CLASS_DEVICE] =
vgpu              371 drivers/gpu/drm/i915/gvt/cfg_space.c 		vgpu_cfg_space(vgpu)[PCI_CLASS_PROG] =
vgpu              376 drivers/gpu/drm/i915/gvt/cfg_space.c 	gmch_ctl = (u16 *)(vgpu_cfg_space(vgpu) + INTEL_GVT_PCI_GMCH_CONTROL);
vgpu              379 drivers/gpu/drm/i915/gvt/cfg_space.c 	intel_vgpu_write_pci_bar(vgpu, PCI_BASE_ADDRESS_2,
vgpu              382 drivers/gpu/drm/i915/gvt/cfg_space.c 	vgpu_cfg_space(vgpu)[PCI_COMMAND] &= ~(PCI_COMMAND_IO
vgpu              388 drivers/gpu/drm/i915/gvt/cfg_space.c 	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_1, 0, 4);
vgpu              389 drivers/gpu/drm/i915/gvt/cfg_space.c 	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_3, 0, 4);
vgpu              390 drivers/gpu/drm/i915/gvt/cfg_space.c 	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_4, 0, 8);
vgpu              391 drivers/gpu/drm/i915/gvt/cfg_space.c 	memset(vgpu_cfg_space(vgpu) + INTEL_GVT_PCI_OPREGION, 0, 4);
vgpu              393 drivers/gpu/drm/i915/gvt/cfg_space.c 	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].size =
vgpu              395 drivers/gpu/drm/i915/gvt/cfg_space.c 	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].size =
vgpu              398 drivers/gpu/drm/i915/gvt/cfg_space.c 	memset(vgpu_cfg_space(vgpu) + PCI_ROM_ADDRESS, 0, 4);
vgpu              407 drivers/gpu/drm/i915/gvt/cfg_space.c void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu)
vgpu              409 drivers/gpu/drm/i915/gvt/cfg_space.c 	u8 cmd = vgpu_cfg_space(vgpu)[PCI_COMMAND];
vgpu              410 drivers/gpu/drm/i915/gvt/cfg_space.c 	bool primary = vgpu_cfg_space(vgpu)[PCI_CLASS_DEVICE] !=
vgpu              414 drivers/gpu/drm/i915/gvt/cfg_space.c 		trap_gttmmio(vgpu, false);
vgpu              415 drivers/gpu/drm/i915/gvt/cfg_space.c 		map_aperture(vgpu, false);
vgpu              423 drivers/gpu/drm/i915/gvt/cfg_space.c 	intel_vgpu_init_cfg_space(vgpu, primary);
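
The cfg_space.c hits implement the virtual PCI configuration space: reads are served from a shadow buffer, while writes to PCI_COMMAND and the BARs drive map_aperture()/trap_gttmmio(), with a per-BAR "tracked" flag so the hypervisor mapping or trap is only changed when the state actually flips. Here is a compilable toy model of that edge-triggered pattern; the fake_* names, the single command byte, and the XOR-based change check are simplifications of mine, not the driver's code.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define FAKE_PCI_COMMAND_MEMORY 0x2   /* memory-space enable bit */

struct fake_bar {
	bool tracked;                 /* is the range currently mapped? */
};

static void fake_map_aperture(struct fake_bar *bar, bool map)
{
	if (map == bar->tracked)      /* mirrors the early return on 'tracked' */
		return;
	printf("%s aperture\n", map ? "map" : "unmap");
	bar->tracked = map;
}

static void fake_command_write(uint8_t *cmd_byte, struct fake_bar *bar,
			       uint8_t new_cmd)
{
	uint8_t old = *cmd_byte;

	*cmd_byte = new_cmd;          /* shadow config space is updated first */
	/* only react when the memory-space enable bit actually changes */
	if ((old ^ new_cmd) & FAKE_PCI_COMMAND_MEMORY)
		fake_map_aperture(bar, new_cmd & FAKE_PCI_COMMAND_MEMORY);
}

int main(void)
{
	uint8_t command = 0;
	struct fake_bar aperture = { .tracked = false };

	fake_command_write(&command, &aperture, FAKE_PCI_COMMAND_MEMORY); /* map   */
	fake_command_write(&command, &aperture, FAKE_PCI_COMMAND_MEMORY); /* no-op */
	fake_command_write(&command, &aperture, 0);                       /* unmap */
	return 0;
}
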
vgpu              462 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu;
vgpu              502 drivers/gpu/drm/i915/gvt/cmd_parser.c 	(s->vgpu->gvt->device_info.gmadr_bytes_in_cmd >> 2)
vgpu              711 drivers/gpu/drm/i915/gvt/cmd_parser.c 			" ring_head(%08lx) ring_tail(%08lx)\n", s->vgpu->id,
vgpu              837 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_gvt *gvt = s->vgpu->gvt;
vgpu              841 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct drm_i915_private *dev_priv = s->vgpu->gvt->dev_priv;
vgpu              875 drivers/gpu/drm/i915/gvt/cmd_parser.c 	vgpu_vreg(s->vgpu, offset) = cmd_val(s, index + 1);
vgpu              882 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu              883 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              930 drivers/gpu/drm/i915/gvt/cmd_parser.c 		intel_gvt_hypervisor_read_gpa(s->vgpu,
vgpu              936 drivers/gpu/drm/i915/gvt/cmd_parser.c 			if (intel_gvt_mmio_has_mode_mask(s->vgpu->gvt, offset))
vgpu              937 drivers/gpu/drm/i915/gvt/cmd_parser.c 				intel_vgpu_mask_mmio_write(vgpu,
vgpu              940 drivers/gpu/drm/i915/gvt/cmd_parser.c 				vgpu_vreg(vgpu, offset) = data;
vgpu              965 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_gvt *gvt = s->vgpu->gvt;
vgpu             1002 drivers/gpu/drm/i915/gvt/cmd_parser.c 		if (IS_BROADWELL(s->vgpu->gvt->dev_priv))
vgpu             1023 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_gvt *gvt = s->vgpu->gvt;
vgpu             1052 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int gmadr_bytes = s->vgpu->gvt->device_info.gmadr_bytes_in_cmd;
vgpu             1110 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int gmadr_bytes = s->vgpu->gvt->device_info.gmadr_bytes_in_cmd;
vgpu             1142 drivers/gpu/drm/i915/gvt/cmd_parser.c 					hws_pga = s->vgpu->hws_pga[s->ring_id];
vgpu             1214 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct drm_i915_private *dev_priv = s->vgpu->gvt->dev_priv;
vgpu             1260 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct drm_i915_private *dev_priv = s->vgpu->gvt->dev_priv;
vgpu             1261 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1319 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct drm_i915_private *dev_priv = s->vgpu->gvt->dev_priv;
vgpu             1326 drivers/gpu/drm/i915/gvt/cmd_parser.c 		stride = vgpu_vreg_t(s->vgpu, info->stride_reg) & GENMASK(9, 0);
vgpu             1327 drivers/gpu/drm/i915/gvt/cmd_parser.c 		tile = (vgpu_vreg_t(s->vgpu, info->ctrl_reg) &
vgpu             1330 drivers/gpu/drm/i915/gvt/cmd_parser.c 		stride = (vgpu_vreg_t(s->vgpu, info->stride_reg) &
vgpu             1332 drivers/gpu/drm/i915/gvt/cmd_parser.c 		tile = (vgpu_vreg_t(s->vgpu, info->ctrl_reg) & (1 << 10)) >> 10;
vgpu             1348 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct drm_i915_private *dev_priv = s->vgpu->gvt->dev_priv;
vgpu             1349 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1351 drivers/gpu/drm/i915/gvt/cmd_parser.c 	set_mask_bits(&vgpu_vreg_t(vgpu, info->surf_reg), GENMASK(31, 12),
vgpu             1354 drivers/gpu/drm/i915/gvt/cmd_parser.c 		set_mask_bits(&vgpu_vreg_t(vgpu, info->stride_reg), GENMASK(9, 0),
vgpu             1356 drivers/gpu/drm/i915/gvt/cmd_parser.c 		set_mask_bits(&vgpu_vreg_t(vgpu, info->ctrl_reg), GENMASK(12, 10),
vgpu             1359 drivers/gpu/drm/i915/gvt/cmd_parser.c 		set_mask_bits(&vgpu_vreg_t(vgpu, info->stride_reg), GENMASK(15, 6),
vgpu             1361 drivers/gpu/drm/i915/gvt/cmd_parser.c 		set_mask_bits(&vgpu_vreg_t(vgpu, info->ctrl_reg), GENMASK(10, 10),
vgpu             1366 drivers/gpu/drm/i915/gvt/cmd_parser.c 		vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(info->pipe))++;
vgpu             1369 drivers/gpu/drm/i915/gvt/cmd_parser.c 		intel_vgpu_trigger_virtual_event(vgpu, info->event);
vgpu             1371 drivers/gpu/drm/i915/gvt/cmd_parser.c 		set_bit(info->event, vgpu->irq.flip_done_event[info->pipe]);
vgpu             1379 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct drm_i915_private *dev_priv = s->vgpu->gvt->dev_priv;
vgpu             1405 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1467 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1468 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int gmadr_bytes = vgpu->gvt->device_info.gmadr_bytes_in_cmd;
vgpu             1488 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1489 drivers/gpu/drm/i915/gvt/cmd_parser.c 	u32 max_surface_size = vgpu->gvt->device_info.max_surface_size;
vgpu             1504 drivers/gpu/drm/i915/gvt/cmd_parser.c 	} else if (!intel_gvt_ggtt_validate_range(vgpu, guest_gma, op_size)) {
vgpu             1523 drivers/gpu/drm/i915/gvt/cmd_parser.c 			vgpu->id,
vgpu             1524 drivers/gpu/drm/i915/gvt/cmd_parser.c 			vgpu_aperture_gmadr_base(vgpu),
vgpu             1525 drivers/gpu/drm/i915/gvt/cmd_parser.c 			vgpu_aperture_gmadr_end(vgpu),
vgpu             1526 drivers/gpu/drm/i915/gvt/cmd_parser.c 			vgpu_hidden_gmadr_base(vgpu),
vgpu             1527 drivers/gpu/drm/i915/gvt/cmd_parser.c 			vgpu_hidden_gmadr_end(vgpu));
vgpu             1533 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int gmadr_bytes = s->vgpu->gvt->device_info.gmadr_bytes_in_cmd;
vgpu             1566 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1590 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int gmadr_bytes = s->vgpu->gvt->device_info.gmadr_bytes_in_cmd;
vgpu             1640 drivers/gpu/drm/i915/gvt/cmd_parser.c 	int gmadr_bytes = s->vgpu->gvt->device_info.gmadr_bytes_in_cmd;
vgpu             1668 drivers/gpu/drm/i915/gvt/cmd_parser.c 			hws_pga = s->vgpu->hws_pga[s->ring_id];
vgpu             1691 drivers/gpu/drm/i915/gvt/cmd_parser.c static int copy_gma_to_hva(struct intel_vgpu *vgpu, struct intel_vgpu_mm *mm,
vgpu             1710 drivers/gpu/drm/i915/gvt/cmd_parser.c 		intel_gvt_hypervisor_read_gpa(vgpu, gpa, va + len, copy_len);
vgpu             1727 drivers/gpu/drm/i915/gvt/cmd_parser.c 			!(s->vgpu->scan_nonprivbb & (1 << s->ring_id)))
vgpu             1740 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1743 drivers/gpu/drm/i915/gvt/cmd_parser.c 		s->vgpu->gtt.ggtt_mm : s->workload->shadow_mm;
vgpu             1754 drivers/gpu/drm/i915/gvt/cmd_parser.c 	info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
vgpu             1763 drivers/gpu/drm/i915/gvt/cmd_parser.c 		if (copy_gma_to_hva(s->vgpu, mm,
vgpu             1766 drivers/gpu/drm/i915/gvt/cmd_parser.c 		info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
vgpu             1796 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1800 drivers/gpu/drm/i915/gvt/cmd_parser.c 	info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
vgpu             1819 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             1826 drivers/gpu/drm/i915/gvt/cmd_parser.c 		s->vgpu->gtt.ggtt_mm : s->workload->shadow_mm;
vgpu             1858 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb->obj = i915_gem_object_create_shmem(s->vgpu->gvt->dev_priv,
vgpu             1881 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = copy_gma_to_hva(s->vgpu, mm,
vgpu             1931 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             2656 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             2667 drivers/gpu/drm/i915/gvt/cmd_parser.c 		info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
vgpu             2679 drivers/gpu/drm/i915/gvt/cmd_parser.c 	trace_gvt_command(vgpu->id, s->ring_id, s->ip_gma, s->ip_va,
vgpu             2728 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = s->vgpu;
vgpu             2781 drivers/gpu/drm/i915/gvt/cmd_parser.c 	s.vgpu = workload->vgpu;
vgpu             2830 drivers/gpu/drm/i915/gvt/cmd_parser.c 	s.vgpu = workload->vgpu;
vgpu             2852 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu             2853 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             2890 drivers/gpu/drm/i915/gvt/cmd_parser.c 		ret = copy_gma_to_hva(vgpu, vgpu->gtt.ggtt_mm,
vgpu             2901 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = copy_gma_to_hva(vgpu, vgpu->gtt.ggtt_mm, gma_head, gma_tail,
vgpu             2913 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu             2936 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu             2941 drivers/gpu/drm/i915/gvt/cmd_parser.c 	obj = i915_gem_object_create_shmem(workload->vgpu->gvt->dev_priv,
vgpu             2963 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = copy_gma_to_hva(workload->vgpu,
vgpu             2964 drivers/gpu/drm/i915/gvt/cmd_parser.c 				workload->vgpu->gtt.ggtt_mm,
vgpu             3008 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu *vgpu = workload->vgpu;
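
The cmd_parser.c hits revolve around scanning guest-submitted rings and batch buffers: copy_gma_to_hva() pulls guest graphics memory into host buffers, get_cmd_info() classifies each command, and the handlers validate addresses against the vGPU's aperture and hidden GM ranges. The self-contained sketch below models only the page-bounded copy loop; the flat fake_gmem lookup stands in for the real gma-to-gpa translation, and every name here is hypothetical.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define FAKE_PAGE_SIZE 4096u

/* Stand-in for the gma->gpa translation: one flat buffer, identity mapped. */
static uint8_t fake_gmem[4 * FAKE_PAGE_SIZE];

static int fake_copy_gma_to_hva(void *dst, uint64_t gma, uint64_t end)
{
	uint8_t *out = dst;
	uint64_t len = 0;

	while (gma != end) {
		uint64_t off = gma & (FAKE_PAGE_SIZE - 1);
		uint64_t left = end - gma;
		/* never copy across a (simulated) GTT page boundary */
		uint64_t copy_len = left < FAKE_PAGE_SIZE - off ?
				    left : FAKE_PAGE_SIZE - off;

		memcpy(out + len, &fake_gmem[gma], copy_len);
		len += copy_len;
		gma += copy_len;
	}
	return (int)len;
}

int main(void)
{
	uint8_t ring[8192];
	int n = fake_copy_gma_to_hva(ring, 100, 5000); /* spans a page boundary */

	printf("copied %d bytes in page-sized chunks\n", n);
	return 0;
}

Clamping each chunk to the end of the current 4 KiB page is what lets code of this shape do one guest read per page even when the requested range spans many pages.
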
vgpu               32 drivers/gpu/drm/i915/gvt/debug.h 	if (IS_ERR_OR_NULL(vgpu))					\
vgpu               35 drivers/gpu/drm/i915/gvt/debug.h 		pr_err("gvt: vgpu %d: "fmt, vgpu->id, ##args);\
vgpu               29 drivers/gpu/drm/i915/gvt/debugfs.c 	struct intel_vgpu *vgpu;
vgpu               67 drivers/gpu/drm/i915/gvt/debugfs.c 	vreg = vgpu_vreg(param->vgpu, offset);
vgpu               87 drivers/gpu/drm/i915/gvt/debugfs.c 	struct intel_vgpu *vgpu = s->private;
vgpu               88 drivers/gpu/drm/i915/gvt/debugfs.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu               90 drivers/gpu/drm/i915/gvt/debugfs.c 		.vgpu = vgpu,
vgpu              130 drivers/gpu/drm/i915/gvt/debugfs.c 	struct intel_vgpu *vgpu = (struct intel_vgpu *)data;
vgpu              131 drivers/gpu/drm/i915/gvt/debugfs.c 	*val = vgpu->scan_nonprivbb;
vgpu              144 drivers/gpu/drm/i915/gvt/debugfs.c 	struct intel_vgpu *vgpu = (struct intel_vgpu *)data;
vgpu              145 drivers/gpu/drm/i915/gvt/debugfs.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              152 drivers/gpu/drm/i915/gvt/debugfs.c 	if (vgpu->scan_nonprivbb == val)
vgpu              160 drivers/gpu/drm/i915/gvt/debugfs.c 		vgpu->id);
vgpu              181 drivers/gpu/drm/i915/gvt/debugfs.c 	vgpu->scan_nonprivbb = val;
vgpu              193 drivers/gpu/drm/i915/gvt/debugfs.c void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu)
vgpu              197 drivers/gpu/drm/i915/gvt/debugfs.c 	snprintf(name, 16, "vgpu%d", vgpu->id);
vgpu              198 drivers/gpu/drm/i915/gvt/debugfs.c 	vgpu->debugfs = debugfs_create_dir(name, vgpu->gvt->debugfs_root);
vgpu              200 drivers/gpu/drm/i915/gvt/debugfs.c 	debugfs_create_bool("active", 0444, vgpu->debugfs, &vgpu->active);
vgpu              201 drivers/gpu/drm/i915/gvt/debugfs.c 	debugfs_create_file("mmio_diff", 0444, vgpu->debugfs, vgpu,
vgpu              203 drivers/gpu/drm/i915/gvt/debugfs.c 	debugfs_create_file("scan_nonprivbb", 0644, vgpu->debugfs, vgpu,
vgpu              211 drivers/gpu/drm/i915/gvt/debugfs.c void intel_gvt_debugfs_remove_vgpu(struct intel_vgpu *vgpu)
vgpu              213 drivers/gpu/drm/i915/gvt/debugfs.c 	debugfs_remove_recursive(vgpu->debugfs);
vgpu              214 drivers/gpu/drm/i915/gvt/debugfs.c 	vgpu->debugfs = NULL;
vgpu               38 drivers/gpu/drm/i915/gvt/display.c static int get_edp_pipe(struct intel_vgpu *vgpu)
vgpu               40 drivers/gpu/drm/i915/gvt/display.c 	u32 data = vgpu_vreg(vgpu, _TRANS_DDI_FUNC_CTL_EDP);
vgpu               58 drivers/gpu/drm/i915/gvt/display.c static int edp_pipe_is_enabled(struct intel_vgpu *vgpu)
vgpu               60 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu               62 drivers/gpu/drm/i915/gvt/display.c 	if (!(vgpu_vreg_t(vgpu, PIPECONF(_PIPE_EDP)) & PIPECONF_ENABLE))
vgpu               65 drivers/gpu/drm/i915/gvt/display.c 	if (!(vgpu_vreg(vgpu, _TRANS_DDI_FUNC_CTL_EDP) & TRANS_DDI_FUNC_ENABLE))
vgpu               70 drivers/gpu/drm/i915/gvt/display.c int pipe_is_enabled(struct intel_vgpu *vgpu, int pipe)
vgpu               72 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu               77 drivers/gpu/drm/i915/gvt/display.c 	if (vgpu_vreg_t(vgpu, PIPECONF(pipe)) & PIPECONF_ENABLE)
vgpu               80 drivers/gpu/drm/i915/gvt/display.c 	if (edp_pipe_is_enabled(vgpu) &&
vgpu               81 drivers/gpu/drm/i915/gvt/display.c 			get_edp_pipe(vgpu) == pipe)
vgpu              169 drivers/gpu/drm/i915/gvt/display.c static void emulate_monitor_status_change(struct intel_vgpu *vgpu)
vgpu              171 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              175 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, GEN8_DE_PORT_ISR) &= ~(BXT_DE_PORT_HP_DDIA |
vgpu              179 drivers/gpu/drm/i915/gvt/display.c 		if (intel_vgpu_has_monitor_on_port(vgpu, PORT_A)) {
vgpu              180 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, GEN8_DE_PORT_ISR) |=
vgpu              184 drivers/gpu/drm/i915/gvt/display.c 		if (intel_vgpu_has_monitor_on_port(vgpu, PORT_B)) {
vgpu              185 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, GEN8_DE_PORT_ISR) |=
vgpu              189 drivers/gpu/drm/i915/gvt/display.c 		if (intel_vgpu_has_monitor_on_port(vgpu, PORT_C)) {
vgpu              190 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, GEN8_DE_PORT_ISR) |=
vgpu              197 drivers/gpu/drm/i915/gvt/display.c 	vgpu_vreg_t(vgpu, SDEISR) &= ~(SDE_PORTB_HOTPLUG_CPT |
vgpu              203 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SDEISR) &= ~(SDE_PORTA_HOTPLUG_SPT |
vgpu              205 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SKL_FUSE_STATUS) |=
vgpu              218 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL1) =
vgpu              220 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL1) |=
vgpu              222 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, LCPLL1_CTL) =
vgpu              224 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_STATUS) = DPLL_LOCK(DPLL_ID_SKL_DPLL0);
vgpu              231 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PIPE_DATA_M1(TRANSCODER_A)) = 63 << TU_SIZE_SHIFT;
vgpu              232 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PIPE_DATA_M1(TRANSCODER_A)) |= 0x5b425e;
vgpu              233 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PIPE_DATA_N1(TRANSCODER_A)) = 0x800000;
vgpu              234 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PIPE_LINK_M1(TRANSCODER_A)) = 0x3cd6e;
vgpu              235 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PIPE_LINK_N1(TRANSCODER_A)) = 0x80000;
vgpu              238 drivers/gpu/drm/i915/gvt/display.c 	if (intel_vgpu_has_monitor_on_port(vgpu, PORT_B)) {
vgpu              239 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) &=
vgpu              241 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) |=
vgpu              243 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) |=
vgpu              245 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SFUSE_STRAP) |= SFUSE_STRAP_DDIB_DETECTED;
vgpu              246 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(TRANSCODER_A)) &=
vgpu              249 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(TRANSCODER_A)) |=
vgpu              254 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, PORT_CLK_SEL(PORT_B)) &=
vgpu              256 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, PORT_CLK_SEL(PORT_B)) |=
vgpu              259 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_B)) |= DDI_BUF_CTL_ENABLE;
vgpu              260 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_B)) &= ~DDI_BUF_IS_IDLE;
vgpu              261 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SDEISR) |= SDE_PORTB_HOTPLUG_CPT;
vgpu              264 drivers/gpu/drm/i915/gvt/display.c 	if (intel_vgpu_has_monitor_on_port(vgpu, PORT_C)) {
vgpu              265 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) &=
vgpu              267 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) |=
vgpu              269 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) |=
vgpu              271 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SDEISR) |= SDE_PORTC_HOTPLUG_CPT;
vgpu              272 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(TRANSCODER_A)) &=
vgpu              275 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(TRANSCODER_A)) |=
vgpu              280 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, PORT_CLK_SEL(PORT_C)) &=
vgpu              282 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, PORT_CLK_SEL(PORT_C)) |=
vgpu              285 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_C)) |= DDI_BUF_CTL_ENABLE;
vgpu              286 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_C)) &= ~DDI_BUF_IS_IDLE;
vgpu              287 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SFUSE_STRAP) |= SFUSE_STRAP_DDIC_DETECTED;
vgpu              290 drivers/gpu/drm/i915/gvt/display.c 	if (intel_vgpu_has_monitor_on_port(vgpu, PORT_D)) {
vgpu              291 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) &=
vgpu              293 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) |=
vgpu              295 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DPLL_CTRL2) |=
vgpu              297 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SDEISR) |= SDE_PORTD_HOTPLUG_CPT;
vgpu              298 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(TRANSCODER_A)) &=
vgpu              301 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, TRANS_DDI_FUNC_CTL(TRANSCODER_A)) |=
vgpu              306 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, PORT_CLK_SEL(PORT_D)) &=
vgpu              308 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, PORT_CLK_SEL(PORT_D)) |=
vgpu              311 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_D)) |= DDI_BUF_CTL_ENABLE;
vgpu              312 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_D)) &= ~DDI_BUF_IS_IDLE;
vgpu              313 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SFUSE_STRAP) |= SFUSE_STRAP_DDID_DETECTED;
vgpu              318 drivers/gpu/drm/i915/gvt/display.c 			intel_vgpu_has_monitor_on_port(vgpu, PORT_E)) {
vgpu              319 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SDEISR) |= SDE_PORTE_HOTPLUG_SPT;
vgpu              322 drivers/gpu/drm/i915/gvt/display.c 	if (intel_vgpu_has_monitor_on_port(vgpu, PORT_A)) {
vgpu              324 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, GEN8_DE_PORT_ISR) |=
vgpu              327 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, SDEISR) |= SDE_PORTA_HOTPLUG_SPT;
vgpu              329 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_A)) |= DDI_INIT_DISPLAY_DETECTED;
vgpu              334 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PCH_ADPA) &= ~ADPA_CRT_HOTPLUG_MONITOR_MASK;
vgpu              338 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, DSPCNTR(pipe)) &= ~DISPLAY_PLANE_ENABLE;
vgpu              339 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SPRCTL(pipe)) &= ~SPRITE_ENABLE;
vgpu              340 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, CURCNTR(pipe)) &= ~MCURSOR_MODE;
vgpu              341 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, CURCNTR(pipe)) |= MCURSOR_MODE_DISABLE;
vgpu              344 drivers/gpu/drm/i915/gvt/display.c 	vgpu_vreg_t(vgpu, PIPECONF(PIPE_A)) |= PIPECONF_ENABLE;
vgpu              347 drivers/gpu/drm/i915/gvt/display.c static void clean_virtual_dp_monitor(struct intel_vgpu *vgpu, int port_num)
vgpu              349 drivers/gpu/drm/i915/gvt/display.c 	struct intel_vgpu_port *port = intel_vgpu_port(vgpu, port_num);
vgpu              358 drivers/gpu/drm/i915/gvt/display.c static int setup_virtual_dp_monitor(struct intel_vgpu *vgpu, int port_num,
vgpu              361 drivers/gpu/drm/i915/gvt/display.c 	struct intel_vgpu_port *port = intel_vgpu_port(vgpu, port_num);
vgpu              386 drivers/gpu/drm/i915/gvt/display.c 	emulate_monitor_status_change(vgpu);
vgpu              403 drivers/gpu/drm/i915/gvt/display.c 	struct intel_vgpu *vgpu;
vgpu              408 drivers/gpu/drm/i915/gvt/display.c 	for_each_active_vgpu(gvt, vgpu, id) {
vgpu              410 drivers/gpu/drm/i915/gvt/display.c 			if (pipe_is_enabled(vgpu, pipe)) {
vgpu              429 drivers/gpu/drm/i915/gvt/display.c static void emulate_vblank_on_pipe(struct intel_vgpu *vgpu, int pipe)
vgpu              431 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              432 drivers/gpu/drm/i915/gvt/display.c 	struct intel_vgpu_irq *irq = &vgpu->irq;
vgpu              446 drivers/gpu/drm/i915/gvt/display.c 		if (!pipe_is_enabled(vgpu, pipe))
vgpu              449 drivers/gpu/drm/i915/gvt/display.c 		intel_vgpu_trigger_virtual_event(vgpu, event);
vgpu              452 drivers/gpu/drm/i915/gvt/display.c 	if (pipe_is_enabled(vgpu, pipe)) {
vgpu              453 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PIPE_FRMCOUNT_G4X(pipe))++;
vgpu              454 drivers/gpu/drm/i915/gvt/display.c 		intel_vgpu_trigger_virtual_event(vgpu, vblank_event[pipe]);
vgpu              458 drivers/gpu/drm/i915/gvt/display.c static void emulate_vblank(struct intel_vgpu *vgpu)
vgpu              462 drivers/gpu/drm/i915/gvt/display.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              463 drivers/gpu/drm/i915/gvt/display.c 	for_each_pipe(vgpu->gvt->dev_priv, pipe)
vgpu              464 drivers/gpu/drm/i915/gvt/display.c 		emulate_vblank_on_pipe(vgpu, pipe);
vgpu              465 drivers/gpu/drm/i915/gvt/display.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              477 drivers/gpu/drm/i915/gvt/display.c 	struct intel_vgpu *vgpu;
vgpu              481 drivers/gpu/drm/i915/gvt/display.c 	for_each_active_vgpu(gvt, vgpu, id)
vgpu              482 drivers/gpu/drm/i915/gvt/display.c 		emulate_vblank(vgpu);
vgpu              494 drivers/gpu/drm/i915/gvt/display.c void intel_vgpu_emulate_hotplug(struct intel_vgpu *vgpu, bool connected)
vgpu              496 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              502 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, SFUSE_STRAP) |=
vgpu              504 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, SDEISR) |= SDE_PORTD_HOTPLUG_CPT;
vgpu              506 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, SFUSE_STRAP) &=
vgpu              508 drivers/gpu/drm/i915/gvt/display.c 			vgpu_vreg_t(vgpu, SDEISR) &= ~SDE_PORTD_HOTPLUG_CPT;
vgpu              510 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, SDEIIR) |= SDE_PORTD_HOTPLUG_CPT;
vgpu              511 drivers/gpu/drm/i915/gvt/display.c 		vgpu_vreg_t(vgpu, PCH_PORT_HOTPLUG) |=
vgpu              513 drivers/gpu/drm/i915/gvt/display.c 		intel_vgpu_trigger_virtual_event(vgpu, DP_D_HOTPLUG);
vgpu              524 drivers/gpu/drm/i915/gvt/display.c void intel_vgpu_clean_display(struct intel_vgpu *vgpu)
vgpu              526 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              530 drivers/gpu/drm/i915/gvt/display.c 		clean_virtual_dp_monitor(vgpu, PORT_D);
vgpu              532 drivers/gpu/drm/i915/gvt/display.c 		clean_virtual_dp_monitor(vgpu, PORT_B);
vgpu              546 drivers/gpu/drm/i915/gvt/display.c int intel_vgpu_init_display(struct intel_vgpu *vgpu, u64 resolution)
vgpu              548 drivers/gpu/drm/i915/gvt/display.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              550 drivers/gpu/drm/i915/gvt/display.c 	intel_vgpu_init_i2c_edid(vgpu);
vgpu              554 drivers/gpu/drm/i915/gvt/display.c 		return setup_virtual_dp_monitor(vgpu, PORT_D, GVT_DP_D,
vgpu              557 drivers/gpu/drm/i915/gvt/display.c 		return setup_virtual_dp_monitor(vgpu, PORT_B, GVT_DP_B,
vgpu              568 drivers/gpu/drm/i915/gvt/display.c void intel_vgpu_reset_display(struct intel_vgpu *vgpu)
vgpu              570 drivers/gpu/drm/i915/gvt/display.c 	emulate_monitor_status_change(vgpu);
vgpu               41 drivers/gpu/drm/i915/gvt/display.h #define intel_vgpu_port(vgpu, port) \
vgpu               42 drivers/gpu/drm/i915/gvt/display.h 	(&(vgpu->display.ports[port]))
vgpu               44 drivers/gpu/drm/i915/gvt/display.h #define intel_vgpu_has_monitor_on_port(vgpu, port) \
vgpu               45 drivers/gpu/drm/i915/gvt/display.h 	(intel_vgpu_port(vgpu, port)->edid && \
vgpu               46 drivers/gpu/drm/i915/gvt/display.h 		intel_vgpu_port(vgpu, port)->edid->data_valid)
vgpu               48 drivers/gpu/drm/i915/gvt/display.h #define intel_vgpu_port_is_dp(vgpu, port) \
vgpu               49 drivers/gpu/drm/i915/gvt/display.h 	((intel_vgpu_port(vgpu, port)->type == GVT_DP_A) || \
vgpu               50 drivers/gpu/drm/i915/gvt/display.h 	(intel_vgpu_port(vgpu, port)->type == GVT_DP_B) || \
vgpu               51 drivers/gpu/drm/i915/gvt/display.h 	(intel_vgpu_port(vgpu, port)->type == GVT_DP_C) || \
vgpu               52 drivers/gpu/drm/i915/gvt/display.h 	(intel_vgpu_port(vgpu, port)->type == GVT_DP_D))
vgpu              203 drivers/gpu/drm/i915/gvt/display.h int intel_vgpu_init_display(struct intel_vgpu *vgpu, u64 resolution);
vgpu              204 drivers/gpu/drm/i915/gvt/display.h void intel_vgpu_reset_display(struct intel_vgpu *vgpu);
vgpu              205 drivers/gpu/drm/i915/gvt/display.h void intel_vgpu_clean_display(struct intel_vgpu *vgpu);
vgpu              207 drivers/gpu/drm/i915/gvt/display.h int pipe_is_enabled(struct intel_vgpu *vgpu, int pipe);
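
The display.c/display.h hits emulate monitor presence (emulate_monitor_status_change(), setup_virtual_dp_monitor()) and periodic vblanks: emulate_vblank() walks every pipe of every active vGPU, and emulate_vblank_on_pipe() bumps the frame counter and fires a virtual event only for pipes that pipe_is_enabled() reports as on. A tiny stand-alone model of that loop follows, with made-up fake_* types in place of the vGPU register file and event machinery.

#include <stdbool.h>
#include <stdio.h>

#define FAKE_NR_PIPES 3

struct fake_pipe {
	bool enabled;                 /* models pipe_is_enabled() */
	unsigned int frmcount;        /* models the per-pipe frame counter */
};

static void fake_trigger_vblank_event(int pipe)
{
	printf("vblank event on pipe %d\n", pipe);
}

static void fake_emulate_vblank(struct fake_pipe pipes[FAKE_NR_PIPES])
{
	for (int pipe = 0; pipe < FAKE_NR_PIPES; pipe++) {
		if (!pipes[pipe].enabled)
			continue;                /* disabled pipes get nothing */
		pipes[pipe].frmcount++;          /* advance the frame counter */
		fake_trigger_vblank_event(pipe); /* then fire the virtual event */
	}
}

int main(void)
{
	struct fake_pipe pipes[FAKE_NR_PIPES] = {
		{ .enabled = true }, { .enabled = false }, { .enabled = true },
	};

	fake_emulate_vblank(pipes);
	return 0;
}
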
vgpu               90 drivers/gpu/drm/i915/gvt/dmabuf.c 	struct intel_vgpu *vgpu = obj->vgpu;
vgpu               94 drivers/gpu/drm/i915/gvt/dmabuf.c 	if (vgpu && vgpu->active && !list_empty(&vgpu->dmabuf_obj_list_head)) {
vgpu               95 drivers/gpu/drm/i915/gvt/dmabuf.c 		list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
vgpu              100 drivers/gpu/drm/i915/gvt/dmabuf.c 				intel_gvt_hypervisor_put_vfio_device(vgpu);
vgpu              101 drivers/gpu/drm/i915/gvt/dmabuf.c 				idr_remove(&vgpu->object_idr,
vgpu              131 drivers/gpu/drm/i915/gvt/dmabuf.c 	struct intel_vgpu *vgpu = obj->vgpu;
vgpu              133 drivers/gpu/drm/i915/gvt/dmabuf.c 	if (vgpu) {
vgpu              134 drivers/gpu/drm/i915/gvt/dmabuf.c 		mutex_lock(&vgpu->dmabuf_lock);
vgpu              137 drivers/gpu/drm/i915/gvt/dmabuf.c 		mutex_unlock(&vgpu->dmabuf_lock);
vgpu              207 drivers/gpu/drm/i915/gvt/dmabuf.c 		struct intel_vgpu *vgpu,
vgpu              218 drivers/gpu/drm/i915/gvt/dmabuf.c 		ret = intel_vgpu_decode_primary_plane(vgpu, &p);
vgpu              248 drivers/gpu/drm/i915/gvt/dmabuf.c 		ret = intel_vgpu_decode_cursor_plane(vgpu, &c);
vgpu              284 drivers/gpu/drm/i915/gvt/dmabuf.c 	if (!intel_gvt_ggtt_validate_range(vgpu, info->start, info->size)) {
vgpu              293 drivers/gpu/drm/i915/gvt/dmabuf.c pick_dmabuf_by_info(struct intel_vgpu *vgpu,
vgpu              301 drivers/gpu/drm/i915/gvt/dmabuf.c 	list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
vgpu              325 drivers/gpu/drm/i915/gvt/dmabuf.c pick_dmabuf_by_num(struct intel_vgpu *vgpu, u32 id)
vgpu              331 drivers/gpu/drm/i915/gvt/dmabuf.c 	list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
vgpu              361 drivers/gpu/drm/i915/gvt/dmabuf.c int intel_vgpu_query_plane(struct intel_vgpu *vgpu, void *args)
vgpu              363 drivers/gpu/drm/i915/gvt/dmabuf.c 	struct drm_device *dev = &vgpu->gvt->dev_priv->drm;
vgpu              376 drivers/gpu/drm/i915/gvt/dmabuf.c 	ret = vgpu_get_plane_info(dev, vgpu, &fb_info,
vgpu              381 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_lock(&vgpu->dmabuf_lock);
vgpu              383 drivers/gpu/drm/i915/gvt/dmabuf.c 	dmabuf_obj = pick_dmabuf_by_info(vgpu, &fb_info);
vgpu              398 drivers/gpu/drm/i915/gvt/dmabuf.c 			    vgpu->id, kref_read(&dmabuf_obj->kref),
vgpu              400 drivers/gpu/drm/i915/gvt/dmabuf.c 		mutex_unlock(&vgpu->dmabuf_lock);
vgpu              404 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_unlock(&vgpu->dmabuf_lock);
vgpu              425 drivers/gpu/drm/i915/gvt/dmabuf.c 	dmabuf_obj->vgpu = vgpu;
vgpu              427 drivers/gpu/drm/i915/gvt/dmabuf.c 	ret = idr_alloc(&vgpu->object_idr, dmabuf_obj, 1, 0, GFP_NOWAIT);
vgpu              437 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_lock(&vgpu->dmabuf_lock);
vgpu              438 drivers/gpu/drm/i915/gvt/dmabuf.c 	if (intel_gvt_hypervisor_get_vfio_device(vgpu)) {
vgpu              440 drivers/gpu/drm/i915/gvt/dmabuf.c 		mutex_unlock(&vgpu->dmabuf_lock);
vgpu              443 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_unlock(&vgpu->dmabuf_lock);
vgpu              448 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_lock(&vgpu->dmabuf_lock);
vgpu              449 drivers/gpu/drm/i915/gvt/dmabuf.c 	list_add_tail(&dmabuf_obj->list, &vgpu->dmabuf_obj_list_head);
vgpu              450 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_unlock(&vgpu->dmabuf_lock);
vgpu              452 drivers/gpu/drm/i915/gvt/dmabuf.c 	gvt_dbg_dpy("vgpu%d: %s new dmabuf_obj ref %d, id %d\n", vgpu->id,
vgpu              467 drivers/gpu/drm/i915/gvt/dmabuf.c int intel_vgpu_get_dmabuf(struct intel_vgpu *vgpu, unsigned int dmabuf_id)
vgpu              469 drivers/gpu/drm/i915/gvt/dmabuf.c 	struct drm_device *dev = &vgpu->gvt->dev_priv->drm;
vgpu              476 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_lock(&vgpu->dmabuf_lock);
vgpu              478 drivers/gpu/drm/i915/gvt/dmabuf.c 	dmabuf_obj = pick_dmabuf_by_num(vgpu, dmabuf_id);
vgpu              515 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_unlock(&vgpu->dmabuf_lock);
vgpu              519 drivers/gpu/drm/i915/gvt/dmabuf.c 		    vgpu->id, dmabuf_obj->dmabuf_id,
vgpu              534 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_unlock(&vgpu->dmabuf_lock);
vgpu              538 drivers/gpu/drm/i915/gvt/dmabuf.c void intel_vgpu_dmabuf_cleanup(struct intel_vgpu *vgpu)
vgpu              543 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_lock(&vgpu->dmabuf_lock);
vgpu              544 drivers/gpu/drm/i915/gvt/dmabuf.c 	list_for_each_safe(pos, n, &vgpu->dmabuf_obj_list_head) {
vgpu              547 drivers/gpu/drm/i915/gvt/dmabuf.c 		dmabuf_obj->vgpu = NULL;
vgpu              549 drivers/gpu/drm/i915/gvt/dmabuf.c 		idr_remove(&vgpu->object_idr, dmabuf_obj->dmabuf_id);
vgpu              550 drivers/gpu/drm/i915/gvt/dmabuf.c 		intel_gvt_hypervisor_put_vfio_device(vgpu);
vgpu              560 drivers/gpu/drm/i915/gvt/dmabuf.c 	mutex_unlock(&vgpu->dmabuf_lock);
vgpu               55 drivers/gpu/drm/i915/gvt/dmabuf.h 	struct intel_vgpu *vgpu;
vgpu               63 drivers/gpu/drm/i915/gvt/dmabuf.h int intel_vgpu_query_plane(struct intel_vgpu *vgpu, void *args);
vgpu               64 drivers/gpu/drm/i915/gvt/dmabuf.h int intel_vgpu_get_dmabuf(struct intel_vgpu *vgpu, unsigned int dmabuf_id);
vgpu               65 drivers/gpu/drm/i915/gvt/dmabuf.h void intel_vgpu_dmabuf_cleanup(struct intel_vgpu *vgpu);
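
The dmabuf.c hits manage per-vGPU dmabuf objects on vgpu->dmabuf_obj_list_head: lookups by plane info or by id walk the list under dmabuf_lock and take an extra reference on a hit. Below is a rough user-space analogue of pick_dmabuf_by_num() using a plain singly linked list and an integer refcount; both are stand-ins for the kernel's list_head/kref, and no locking is modelled.

#include <stdint.h>
#include <stdio.h>

struct fake_dmabuf_obj {
	uint32_t dmabuf_id;
	int ref;                        /* stands in for the kref */
	struct fake_dmabuf_obj *next;   /* stands in for the list chain */
};

static struct fake_dmabuf_obj *
fake_pick_by_num(struct fake_dmabuf_obj *head, uint32_t id)
{
	for (struct fake_dmabuf_obj *obj = head; obj; obj = obj->next) {
		if (obj->dmabuf_id == id) {
			obj->ref++;     /* the caller now owns a reference */
			return obj;
		}
	}
	return NULL;                    /* not found: caller must create one */
}

int main(void)
{
	struct fake_dmabuf_obj b = { .dmabuf_id = 2, .ref = 1, .next = NULL };
	struct fake_dmabuf_obj a = { .dmabuf_id = 1, .ref = 1, .next = &b };
	struct fake_dmabuf_obj *hit = fake_pick_by_num(&a, 2);

	if (hit)
		printf("found dmabuf id %u, refcount now %d\n",
		       hit->dmabuf_id, hit->ref);
	return 0;
}
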
vgpu               49 drivers/gpu/drm/i915/gvt/edid.c static unsigned char edid_get_byte(struct intel_vgpu *vgpu)
vgpu               51 drivers/gpu/drm/i915/gvt/edid.c 	struct intel_vgpu_i2c_edid *edid = &vgpu->display.i2c_edid;
vgpu               68 drivers/gpu/drm/i915/gvt/edid.c 	if (intel_vgpu_has_monitor_on_port(vgpu, edid->port)) {
vgpu               70 drivers/gpu/drm/i915/gvt/edid.c 			intel_vgpu_port(vgpu, edid->port)->edid;
vgpu              126 drivers/gpu/drm/i915/gvt/edid.c static void reset_gmbus_controller(struct intel_vgpu *vgpu)
vgpu              128 drivers/gpu/drm/i915/gvt/edid.c 	vgpu_vreg_t(vgpu, PCH_GMBUS2) = GMBUS_HW_RDY;
vgpu              129 drivers/gpu/drm/i915/gvt/edid.c 	if (!vgpu->display.i2c_edid.edid_available)
vgpu              130 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg_t(vgpu, PCH_GMBUS2) |= GMBUS_SATOER;
vgpu              131 drivers/gpu/drm/i915/gvt/edid.c 	vgpu->display.i2c_edid.gmbus.phase = GMBUS_IDLE_PHASE;
vgpu              135 drivers/gpu/drm/i915/gvt/edid.c static int gmbus0_mmio_write(struct intel_vgpu *vgpu,
vgpu              138 drivers/gpu/drm/i915/gvt/edid.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              141 drivers/gpu/drm/i915/gvt/edid.c 	memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes);
vgpu              143 drivers/gpu/drm/i915/gvt/edid.c 	pin_select = vgpu_vreg(vgpu, offset) & _GMBUS_PIN_SEL_MASK;
vgpu              145 drivers/gpu/drm/i915/gvt/edid.c 	intel_vgpu_init_i2c_edid(vgpu);
vgpu              159 drivers/gpu/drm/i915/gvt/edid.c 	vgpu->display.i2c_edid.state = I2C_GMBUS;
vgpu              160 drivers/gpu/drm/i915/gvt/edid.c 	vgpu->display.i2c_edid.gmbus.phase = GMBUS_IDLE_PHASE;
vgpu              162 drivers/gpu/drm/i915/gvt/edid.c 	vgpu_vreg_t(vgpu, PCH_GMBUS2) &= ~GMBUS_ACTIVE;
vgpu              163 drivers/gpu/drm/i915/gvt/edid.c 	vgpu_vreg_t(vgpu, PCH_GMBUS2) |= GMBUS_HW_RDY | GMBUS_HW_WAIT_PHASE;
vgpu              165 drivers/gpu/drm/i915/gvt/edid.c 	if (intel_vgpu_has_monitor_on_port(vgpu, port) &&
vgpu              166 drivers/gpu/drm/i915/gvt/edid.c 			!intel_vgpu_port_is_dp(vgpu, port)) {
vgpu              167 drivers/gpu/drm/i915/gvt/edid.c 		vgpu->display.i2c_edid.port = port;
vgpu              168 drivers/gpu/drm/i915/gvt/edid.c 		vgpu->display.i2c_edid.edid_available = true;
vgpu              169 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg_t(vgpu, PCH_GMBUS2) &= ~GMBUS_SATOER;
vgpu              171 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg_t(vgpu, PCH_GMBUS2) |= GMBUS_SATOER;
vgpu              175 drivers/gpu/drm/i915/gvt/edid.c static int gmbus1_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              178 drivers/gpu/drm/i915/gvt/edid.c 	struct intel_vgpu_i2c_edid *i2c_edid = &vgpu->display.i2c_edid;
vgpu              182 drivers/gpu/drm/i915/gvt/edid.c 	if (vgpu_vreg(vgpu, offset) & GMBUS_SW_CLR_INT) {
vgpu              184 drivers/gpu/drm/i915/gvt/edid.c 			vgpu_vreg(vgpu, offset) &= ~GMBUS_SW_CLR_INT;
vgpu              185 drivers/gpu/drm/i915/gvt/edid.c 			reset_gmbus_controller(vgpu);
vgpu              198 drivers/gpu/drm/i915/gvt/edid.c 			vgpu_vreg_t(vgpu, PCH_GMBUS2) &= ~GMBUS_INT;
vgpu              199 drivers/gpu/drm/i915/gvt/edid.c 			vgpu_vreg_t(vgpu, PCH_GMBUS2) |= GMBUS_HW_RDY;
vgpu              219 drivers/gpu/drm/i915/gvt/edid.c 					vgpu->id, slave_addr);
vgpu              237 drivers/gpu/drm/i915/gvt/edid.c 			if (gmbus1_bus_cycle(vgpu_vreg(vgpu, offset))
vgpu              239 drivers/gpu/drm/i915/gvt/edid.c 				intel_vgpu_init_i2c_edid(vgpu);
vgpu              247 drivers/gpu/drm/i915/gvt/edid.c 				vgpu_vreg_t(vgpu, PCH_GMBUS2) &= ~GMBUS_ACTIVE;
vgpu              259 drivers/gpu/drm/i915/gvt/edid.c 			vgpu_vreg_t(vgpu, PCH_GMBUS2) |= GMBUS_ACTIVE;
vgpu              271 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg(vgpu, offset) = wvalue;
vgpu              276 drivers/gpu/drm/i915/gvt/edid.c static int gmbus3_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              283 drivers/gpu/drm/i915/gvt/edid.c static int gmbus3_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              288 drivers/gpu/drm/i915/gvt/edid.c 	struct intel_vgpu_i2c_edid *i2c_edid = &vgpu->display.i2c_edid;
vgpu              295 drivers/gpu/drm/i915/gvt/edid.c 	if (vgpu_vreg_t(vgpu, PCH_GMBUS1) & GMBUS_SLAVE_READ) {
vgpu              297 drivers/gpu/drm/i915/gvt/edid.c 			memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes);
vgpu              304 drivers/gpu/drm/i915/gvt/edid.c 			byte_data = edid_get_byte(vgpu);
vgpu              308 drivers/gpu/drm/i915/gvt/edid.c 		memcpy(&vgpu_vreg(vgpu, offset), &reg_data, byte_count);
vgpu              309 drivers/gpu/drm/i915/gvt/edid.c 		memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes);
vgpu              323 drivers/gpu/drm/i915/gvt/edid.c 			intel_vgpu_init_i2c_edid(vgpu);
vgpu              330 drivers/gpu/drm/i915/gvt/edid.c 		memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes);
vgpu              336 drivers/gpu/drm/i915/gvt/edid.c static int gmbus2_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              339 drivers/gpu/drm/i915/gvt/edid.c 	u32 value = vgpu_vreg(vgpu, offset);
vgpu              341 drivers/gpu/drm/i915/gvt/edid.c 	if (!(vgpu_vreg(vgpu, offset) & GMBUS_INUSE))
vgpu              342 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg(vgpu, offset) |= GMBUS_INUSE;
vgpu              347 drivers/gpu/drm/i915/gvt/edid.c static int gmbus2_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              353 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg(vgpu, offset) &= ~GMBUS_INUSE;
vgpu              371 drivers/gpu/drm/i915/gvt/edid.c int intel_gvt_i2c_handle_gmbus_read(struct intel_vgpu *vgpu,
vgpu              378 drivers/gpu/drm/i915/gvt/edid.c 		return gmbus2_mmio_read(vgpu, offset, p_data, bytes);
vgpu              380 drivers/gpu/drm/i915/gvt/edid.c 		return gmbus3_mmio_read(vgpu, offset, p_data, bytes);
vgpu              382 drivers/gpu/drm/i915/gvt/edid.c 	memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes);
vgpu              399 drivers/gpu/drm/i915/gvt/edid.c int intel_gvt_i2c_handle_gmbus_write(struct intel_vgpu *vgpu,
vgpu              406 drivers/gpu/drm/i915/gvt/edid.c 		return gmbus0_mmio_write(vgpu, offset, p_data, bytes);
vgpu              408 drivers/gpu/drm/i915/gvt/edid.c 		return gmbus1_mmio_write(vgpu, offset, p_data, bytes);
vgpu              410 drivers/gpu/drm/i915/gvt/edid.c 		return gmbus2_mmio_write(vgpu, offset, p_data, bytes);
vgpu              412 drivers/gpu/drm/i915/gvt/edid.c 		return gmbus3_mmio_write(vgpu, offset, p_data, bytes);
vgpu              414 drivers/gpu/drm/i915/gvt/edid.c 	memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes);
vgpu              471 drivers/gpu/drm/i915/gvt/edid.c void intel_gvt_i2c_handle_aux_ch_write(struct intel_vgpu *vgpu,
vgpu              476 drivers/gpu/drm/i915/gvt/edid.c 	struct intel_vgpu_i2c_edid *i2c_edid = &vgpu->display.i2c_edid;
vgpu              484 drivers/gpu/drm/i915/gvt/edid.c 		vgpu_vreg(vgpu, offset) = value;
vgpu              490 drivers/gpu/drm/i915/gvt/edid.c 	msg = vgpu_vreg(vgpu, offset + 4);
vgpu              501 drivers/gpu/drm/i915/gvt/edid.c 	vgpu_vreg(vgpu, offset) =
vgpu              509 drivers/gpu/drm/i915/gvt/edid.c 			intel_vgpu_init_i2c_edid(vgpu);
vgpu              516 drivers/gpu/drm/i915/gvt/edid.c 				intel_vgpu_init_i2c_edid(vgpu);
vgpu              521 drivers/gpu/drm/i915/gvt/edid.c 				if (intel_vgpu_has_monitor_on_port(vgpu,
vgpu              523 drivers/gpu/drm/i915/gvt/edid.c 					intel_vgpu_port_is_dp(vgpu, port_idx))
vgpu              540 drivers/gpu/drm/i915/gvt/edid.c 			unsigned char val = edid_get_byte(vgpu);
vgpu              551 drivers/gpu/drm/i915/gvt/edid.c 	vgpu_vreg(vgpu, offset + 4) = aux_data_for_write;
vgpu              561 drivers/gpu/drm/i915/gvt/edid.c void intel_vgpu_init_i2c_edid(struct intel_vgpu *vgpu)
vgpu              563 drivers/gpu/drm/i915/gvt/edid.c 	struct intel_vgpu_i2c_edid *edid = &vgpu->display.i2c_edid;
vgpu              137 drivers/gpu/drm/i915/gvt/edid.h void intel_vgpu_init_i2c_edid(struct intel_vgpu *vgpu);
vgpu              139 drivers/gpu/drm/i915/gvt/edid.h int intel_gvt_i2c_handle_gmbus_read(struct intel_vgpu *vgpu,
vgpu              142 drivers/gpu/drm/i915/gvt/edid.h int intel_gvt_i2c_handle_gmbus_write(struct intel_vgpu *vgpu,
vgpu              145 drivers/gpu/drm/i915/gvt/edid.h void intel_gvt_i2c_handle_aux_ch_write(struct intel_vgpu *vgpu,
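
The edid.c hits implement EDID transfers over emulated GMBUS and DP AUX: gmbus0_mmio_write() selects the pin/port, gmbus1_mmio_write() drives the bus-cycle state machine, and gmbus3_mmio_read() hands EDID bytes back to the guest through the 32-bit data register via edid_get_byte(), up to four per read. The sketch below models only that byte-packing step; the array contents, sizes and fake_* names are invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* A made-up EDID header; real EDID blocks are 128 bytes. */
static const uint8_t fake_edid[128] = {
	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00,
};
static unsigned int fake_edid_pos;      /* read cursor into the EDID block */

static uint8_t fake_edid_get_byte(void)
{
	return fake_edid_pos < sizeof(fake_edid) ? fake_edid[fake_edid_pos++] : 0;
}

/* Pack up to four EDID bytes, little-endian, into one 32-bit data register. */
static uint32_t fake_gmbus3_read(unsigned int total_bytes)
{
	uint32_t reg = 0;
	unsigned int byte_count;

	if (fake_edid_pos >= total_bytes)
		return 0;               /* transfer already finished */
	byte_count = total_bytes - fake_edid_pos;
	if (byte_count > 4)
		byte_count = 4;
	for (unsigned int i = 0; i < byte_count; i++)
		reg |= (uint32_t)fake_edid_get_byte() << (8 * i);
	return reg;
}

int main(void)
{
	printf("word 0: %#010x\n", fake_gmbus3_read(8)); /* 0xffffff00 */
	printf("word 1: %#010x\n", fake_gmbus3_read(8)); /* 0x00ffffff */
	return 0;
}
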
vgpu               94 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = execlist->vgpu;
vgpu               97 drivers/gpu/drm/i915/gvt/execlist.c 	u32 status_reg = execlist_ring_mmio(vgpu->gvt,
vgpu              100 drivers/gpu/drm/i915/gvt/execlist.c 	status.ldw = vgpu_vreg(vgpu, status_reg);
vgpu              101 drivers/gpu/drm/i915/gvt/execlist.c 	status.udw = vgpu_vreg(vgpu, status_reg + 4);
vgpu              119 drivers/gpu/drm/i915/gvt/execlist.c 	vgpu_vreg(vgpu, status_reg) = status.ldw;
vgpu              120 drivers/gpu/drm/i915/gvt/execlist.c 	vgpu_vreg(vgpu, status_reg + 4) = status.udw;
vgpu              123 drivers/gpu/drm/i915/gvt/execlist.c 		vgpu->id, status_reg, status.ldw, status.udw);
vgpu              130 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = execlist->vgpu;
vgpu              136 drivers/gpu/drm/i915/gvt/execlist.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              138 drivers/gpu/drm/i915/gvt/execlist.c 	ctx_status_ptr_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
vgpu              140 drivers/gpu/drm/i915/gvt/execlist.c 	ctx_status_buf_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
vgpu              143 drivers/gpu/drm/i915/gvt/execlist.c 	ctx_status_ptr.dw = vgpu_vreg(vgpu, ctx_status_ptr_reg);
vgpu              156 drivers/gpu/drm/i915/gvt/execlist.c 	vgpu_vreg(vgpu, offset) = status->ldw;
vgpu              157 drivers/gpu/drm/i915/gvt/execlist.c 	vgpu_vreg(vgpu, offset + 4) = status->udw;
vgpu              160 drivers/gpu/drm/i915/gvt/execlist.c 	vgpu_vreg(vgpu, ctx_status_ptr_reg) = ctx_status_ptr.dw;
vgpu              163 drivers/gpu/drm/i915/gvt/execlist.c 	hwsp_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
vgpu              164 drivers/gpu/drm/i915/gvt/execlist.c 					 vgpu->hws_pga[ring_id]);
vgpu              166 drivers/gpu/drm/i915/gvt/execlist.c 		intel_gvt_hypervisor_write_gpa(vgpu,
vgpu              170 drivers/gpu/drm/i915/gvt/execlist.c 		intel_gvt_hypervisor_write_gpa(vgpu,
vgpu              177 drivers/gpu/drm/i915/gvt/execlist.c 		vgpu->id, write_pointer, offset, status->ldw, status->udw);
vgpu              182 drivers/gpu/drm/i915/gvt/execlist.c 	intel_vgpu_trigger_virtual_event(vgpu,
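
The execlist.c context-status fragments above mirror each event twice: once into the vGPU's virtual CSB register pair via vgpu_vreg(), and once into the guest's hardware status page, which is located by translating vgpu->hws_pga[ring_id] through the shadow GGTT and written with intel_gvt_hypervisor_write_gpa() before the virtual context-switch event is triggered. A condensed sketch of that double write follows; the status-page offset is assumed to be computed by the caller from the CSB write pointer, and INTEL_GVT_INVALID_ADDR is assumed to be the translation-failure sentinel.

static void mirror_csb_entry(struct intel_vgpu *vgpu, unsigned int csb_reg,
			     unsigned int hwsp_off, u32 ldw, u32 udw,
			     unsigned long hws_gma)
{
	unsigned long hwsp_gpa;

	/* 1) Update the virtual CSB register pair the guest reads back. */
	vgpu_vreg(vgpu, csb_reg) = ldw;
	vgpu_vreg(vgpu, csb_reg + 4) = udw;

	/* 2) Shadow the same entry into the guest's HW status page;
	 * hws_gma is the page's graphics address (vgpu->hws_pga[ring_id]).
	 */
	hwsp_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, hws_gma);
	if (hwsp_gpa != INTEL_GVT_INVALID_ADDR) {	/* assumed sentinel */
		intel_gvt_hypervisor_write_gpa(vgpu, hwsp_gpa + hwsp_off,
					       &ldw, 4);
		intel_gvt_hypervisor_write_gpa(vgpu, hwsp_gpa + hwsp_off + 4,
					       &udw, 4);
	}
}
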
vgpu              190 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = execlist->vgpu;
vgpu              263 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = execlist->vgpu;
vgpu              265 drivers/gpu/drm/i915/gvt/execlist.c 	u32 status_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
vgpu              269 drivers/gpu/drm/i915/gvt/execlist.c 	status.ldw = vgpu_vreg(vgpu, status_reg);
vgpu              270 drivers/gpu/drm/i915/gvt/execlist.c 	status.udw = vgpu_vreg(vgpu, status_reg + 4);
vgpu              289 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = execlist->vgpu;
vgpu              379 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              380 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              401 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              403 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              406 drivers/gpu/drm/i915/gvt/execlist.c 	struct list_head *next = workload_q_head(vgpu, ring_id)->next;
vgpu              413 drivers/gpu/drm/i915/gvt/execlist.c 	if (workload->status || (vgpu->resetting_eng & BIT(ring_id)))
vgpu              416 drivers/gpu/drm/i915/gvt/execlist.c 	if (!list_empty(workload_q_head(vgpu, ring_id))) {
vgpu              439 drivers/gpu/drm/i915/gvt/execlist.c static int submit_context(struct intel_vgpu *vgpu, int ring_id,
vgpu              443 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              446 drivers/gpu/drm/i915/gvt/execlist.c 	workload = intel_vgpu_create_workload(vgpu, ring_id, desc);
vgpu              464 drivers/gpu/drm/i915/gvt/execlist.c int intel_vgpu_submit_execlist(struct intel_vgpu *vgpu, int ring_id)
vgpu              466 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              492 drivers/gpu/drm/i915/gvt/execlist.c 		ret = submit_context(vgpu, ring_id, desc[i], i == 0);
vgpu              507 drivers/gpu/drm/i915/gvt/execlist.c static void init_vgpu_execlist(struct intel_vgpu *vgpu, int ring_id)
vgpu              509 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              516 drivers/gpu/drm/i915/gvt/execlist.c 	execlist->vgpu = vgpu;
vgpu              521 drivers/gpu/drm/i915/gvt/execlist.c 	ctx_status_ptr_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
vgpu              523 drivers/gpu/drm/i915/gvt/execlist.c 	ctx_status_ptr.dw = vgpu_vreg(vgpu, ctx_status_ptr_reg);
vgpu              526 drivers/gpu/drm/i915/gvt/execlist.c 	vgpu_vreg(vgpu, ctx_status_ptr_reg) = ctx_status_ptr.dw;
vgpu              529 drivers/gpu/drm/i915/gvt/execlist.c static void clean_execlist(struct intel_vgpu *vgpu,
vgpu              532 drivers/gpu/drm/i915/gvt/execlist.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              534 drivers/gpu/drm/i915/gvt/execlist.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              544 drivers/gpu/drm/i915/gvt/execlist.c static void reset_execlist(struct intel_vgpu *vgpu,
vgpu              547 drivers/gpu/drm/i915/gvt/execlist.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              552 drivers/gpu/drm/i915/gvt/execlist.c 		init_vgpu_execlist(vgpu, engine->id);
vgpu              555 drivers/gpu/drm/i915/gvt/execlist.c static int init_execlist(struct intel_vgpu *vgpu,
vgpu              558 drivers/gpu/drm/i915/gvt/execlist.c 	reset_execlist(vgpu, engine_mask);
vgpu              172 drivers/gpu/drm/i915/gvt/execlist.h 	struct intel_vgpu *vgpu;
vgpu              176 drivers/gpu/drm/i915/gvt/execlist.h void intel_vgpu_clean_execlist(struct intel_vgpu *vgpu);
vgpu              178 drivers/gpu/drm/i915/gvt/execlist.h int intel_vgpu_init_execlist(struct intel_vgpu *vgpu);
vgpu              180 drivers/gpu/drm/i915/gvt/execlist.h int intel_vgpu_submit_execlist(struct intel_vgpu *vgpu, int ring_id);
vgpu              182 drivers/gpu/drm/i915/gvt/execlist.h void intel_vgpu_reset_execlist(struct intel_vgpu *vgpu,
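
The execlist.h prototypes above are the per-ring entry points the submission path needs: initialise or reset the virtual execlists, and push the guest's ELSP contents through intel_vgpu_submit_execlist(). A minimal sketch of the hand-off, assuming ring_id has already been resolved from the MMIO offset by the dispatch layer:

static int handle_elsp_submission(struct intel_vgpu *vgpu, int ring_id)
{
	int ret;

	ret = intel_vgpu_submit_execlist(vgpu, ring_id);
	if (ret)
		gvt_dbg_core("vgpu%d: execlist submission failed on ring %d\n",
			     vgpu->id, ring_id);
	return ret;
}
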
vgpu              146 drivers/gpu/drm/i915/gvt/fb_decoder.c static u32 intel_vgpu_get_stride(struct intel_vgpu *vgpu, int pipe,
vgpu              149 drivers/gpu/drm/i915/gvt/fb_decoder.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              151 drivers/gpu/drm/i915/gvt/fb_decoder.c 	u32 stride_reg = vgpu_vreg_t(vgpu, DSPSTRIDE(pipe)) & stride_mask;
vgpu              182 drivers/gpu/drm/i915/gvt/fb_decoder.c static int get_active_pipe(struct intel_vgpu *vgpu)
vgpu              187 drivers/gpu/drm/i915/gvt/fb_decoder.c 		if (pipe_is_enabled(vgpu, i))
vgpu              202 drivers/gpu/drm/i915/gvt/fb_decoder.c int intel_vgpu_decode_primary_plane(struct intel_vgpu *vgpu,
vgpu              206 drivers/gpu/drm/i915/gvt/fb_decoder.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              209 drivers/gpu/drm/i915/gvt/fb_decoder.c 	pipe = get_active_pipe(vgpu);
vgpu              213 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, DSPCNTR(pipe));
vgpu              247 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->base = vgpu_vreg_t(vgpu, DSPSURF(pipe)) & I915_GTT_PAGE_MASK;
vgpu              248 drivers/gpu/drm/i915/gvt/fb_decoder.c 	if (!vgpu_gmadr_is_valid(vgpu, plane->base))
vgpu              251 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->base_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, plane->base);
vgpu              258 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->stride = intel_vgpu_get_stride(vgpu, pipe, plane->tiled,
vgpu              263 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->width = (vgpu_vreg_t(vgpu, PIPESRC(pipe)) & _PIPE_H_SRCSZ_MASK) >>
vgpu              266 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->height = (vgpu_vreg_t(vgpu, PIPESRC(pipe)) &
vgpu              270 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, DSPTILEOFF(pipe));
vgpu              332 drivers/gpu/drm/i915/gvt/fb_decoder.c int intel_vgpu_decode_cursor_plane(struct intel_vgpu *vgpu,
vgpu              337 drivers/gpu/drm/i915/gvt/fb_decoder.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              340 drivers/gpu/drm/i915/gvt/fb_decoder.c 	pipe = get_active_pipe(vgpu);
vgpu              344 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, CURCNTR(pipe));
vgpu              370 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->base = vgpu_vreg_t(vgpu, CURBASE(pipe)) & I915_GTT_PAGE_MASK;
vgpu              371 drivers/gpu/drm/i915/gvt/fb_decoder.c 	if (!vgpu_gmadr_is_valid(vgpu, plane->base))
vgpu              374 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->base_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, plane->base);
vgpu              381 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, CURPOS(pipe));
vgpu              387 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->x_hot = vgpu_vreg_t(vgpu, vgtif_reg(cursor_x_hot));
vgpu              388 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->y_hot = vgpu_vreg_t(vgpu, vgtif_reg(cursor_y_hot));
vgpu              411 drivers/gpu/drm/i915/gvt/fb_decoder.c int intel_vgpu_decode_sprite_plane(struct intel_vgpu *vgpu,
vgpu              419 drivers/gpu/drm/i915/gvt/fb_decoder.c 	pipe = get_active_pipe(vgpu);
vgpu              423 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, SPRCTL(pipe));
vgpu              474 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->base = vgpu_vreg_t(vgpu, SPRSURF(pipe)) & I915_GTT_PAGE_MASK;
vgpu              475 drivers/gpu/drm/i915/gvt/fb_decoder.c 	if (!vgpu_gmadr_is_valid(vgpu, plane->base))
vgpu              478 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->base_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, plane->base);
vgpu              485 drivers/gpu/drm/i915/gvt/fb_decoder.c 	plane->stride = vgpu_vreg_t(vgpu, SPRSTRIDE(pipe)) &
vgpu              488 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, SPRSIZE(pipe));
vgpu              496 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, SPRPOS(pipe));
vgpu              500 drivers/gpu/drm/i915/gvt/fb_decoder.c 	val = vgpu_vreg_t(vgpu, SPROFFSET(pipe));
vgpu              162 drivers/gpu/drm/i915/gvt/fb_decoder.h int intel_vgpu_decode_primary_plane(struct intel_vgpu *vgpu,
vgpu              164 drivers/gpu/drm/i915/gvt/fb_decoder.h int intel_vgpu_decode_cursor_plane(struct intel_vgpu *vgpu,
vgpu              166 drivers/gpu/drm/i915/gvt/fb_decoder.h int intel_vgpu_decode_sprite_plane(struct intel_vgpu *vgpu,
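
All three plane decoders above share the same address step: mask the surface register down to a page-aligned graphics memory address with I915_GTT_PAGE_MASK, reject it if it falls outside the vGPU's aperture/hidden ranges, then translate it to a guest physical address through the shadow GGTT. A sketch of that step in isolation, assuming the driver's gvt.h declarations; the -EINVAL return code is an assumption:

static int decode_plane_base(struct intel_vgpu *vgpu, u32 surf_reg_val,
			     u32 *base, u64 *base_gpa)
{
	/* Page-align the surface address programmed by the guest. */
	*base = surf_reg_val & I915_GTT_PAGE_MASK;
	if (!vgpu_gmadr_is_valid(vgpu, *base))
		return -EINVAL;

	/* Walk the shadow GGTT to find the backing guest physical address. */
	*base_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, *base);
	return 0;
}
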
vgpu               54 drivers/gpu/drm/i915/gvt/gtt.c bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size)
vgpu               57 drivers/gpu/drm/i915/gvt/gtt.c 		return vgpu_gmadr_is_valid(vgpu, addr);
vgpu               59 drivers/gpu/drm/i915/gvt/gtt.c 	if (vgpu_gmadr_is_aperture(vgpu, addr) &&
vgpu               60 drivers/gpu/drm/i915/gvt/gtt.c 	    vgpu_gmadr_is_aperture(vgpu, addr + size - 1))
vgpu               62 drivers/gpu/drm/i915/gvt/gtt.c 	else if (vgpu_gmadr_is_hidden(vgpu, addr) &&
vgpu               63 drivers/gpu/drm/i915/gvt/gtt.c 		 vgpu_gmadr_is_hidden(vgpu, addr + size - 1))
vgpu               72 drivers/gpu/drm/i915/gvt/gtt.c int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr)
vgpu               74 drivers/gpu/drm/i915/gvt/gtt.c 	if (WARN(!vgpu_gmadr_is_valid(vgpu, g_addr),
vgpu               78 drivers/gpu/drm/i915/gvt/gtt.c 	if (vgpu_gmadr_is_aperture(vgpu, g_addr))
vgpu               79 drivers/gpu/drm/i915/gvt/gtt.c 		*h_addr = vgpu_aperture_gmadr_base(vgpu)
vgpu               80 drivers/gpu/drm/i915/gvt/gtt.c 			  + (g_addr - vgpu_aperture_offset(vgpu));
vgpu               82 drivers/gpu/drm/i915/gvt/gtt.c 		*h_addr = vgpu_hidden_gmadr_base(vgpu)
vgpu               83 drivers/gpu/drm/i915/gvt/gtt.c 			  + (g_addr - vgpu_hidden_offset(vgpu));
vgpu               88 drivers/gpu/drm/i915/gvt/gtt.c int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr)
vgpu               90 drivers/gpu/drm/i915/gvt/gtt.c 	if (WARN(!gvt_gmadr_is_valid(vgpu->gvt, h_addr),
vgpu               94 drivers/gpu/drm/i915/gvt/gtt.c 	if (gvt_gmadr_is_aperture(vgpu->gvt, h_addr))
vgpu               95 drivers/gpu/drm/i915/gvt/gtt.c 		*g_addr = vgpu_aperture_gmadr_base(vgpu)
vgpu               96 drivers/gpu/drm/i915/gvt/gtt.c 			+ (h_addr - gvt_aperture_gmadr_base(vgpu->gvt));
vgpu               98 drivers/gpu/drm/i915/gvt/gtt.c 		*g_addr = vgpu_hidden_gmadr_base(vgpu)
vgpu               99 drivers/gpu/drm/i915/gvt/gtt.c 			+ (h_addr - gvt_hidden_gmadr_base(vgpu->gvt));
vgpu              103 drivers/gpu/drm/i915/gvt/gtt.c int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index,
vgpu              109 drivers/gpu/drm/i915/gvt/gtt.c 	ret = intel_gvt_ggtt_gmadr_g2h(vgpu, g_index << I915_GTT_PAGE_SHIFT,
vgpu              118 drivers/gpu/drm/i915/gvt/gtt.c int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index,
vgpu              124 drivers/gpu/drm/i915/gvt/gtt.c 	ret = intel_gvt_ggtt_gmadr_h2g(vgpu, h_index << I915_GTT_PAGE_SHIFT,
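
The gtt.c helpers above implement guest/host graphics-memory translation as pure rebasing: the address is classified as aperture or hidden, its offset inside the vGPU's slice is preserved, and the slice base is swapped between the vGPU view and the host view (the index variants shift by I915_GTT_PAGE_SHIFT around the same calls). A small usage sketch; the debug message wording is an assumption:

static int gma_guest_to_host(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr)
{
	int ret;

	/* Fails if g_addr is outside the vGPU's aperture and hidden ranges. */
	ret = intel_gvt_ggtt_gmadr_g2h(vgpu, g_addr, h_addr);
	if (ret)
		gvt_dbg_core("vgpu%d: invalid guest gmadr %llx\n",
			     vgpu->id, g_addr);
	return ret;
}
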
vgpu              303 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu)
vgpu              305 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu              312 drivers/gpu/drm/i915/gvt/gtt.c 		ret = intel_gvt_hypervisor_read_gpa(vgpu, gpa +
vgpu              318 drivers/gpu/drm/i915/gvt/gtt.c 		e->val64 = read_pte64(vgpu->gvt->dev_priv, index);
vgpu              328 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu)
vgpu              330 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu              337 drivers/gpu/drm/i915/gvt/gtt.c 		ret = intel_gvt_hypervisor_write_gpa(vgpu, gpa +
vgpu              343 drivers/gpu/drm/i915/gvt/gtt.c 		write_pte64(vgpu->gvt->dev_priv, index, e->val64);
vgpu              553 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
vgpu              560 drivers/gpu/drm/i915/gvt/gtt.c 			   entry, index, false, 0, mm->vgpu);
vgpu              580 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
vgpu              584 drivers/gpu/drm/i915/gvt/gtt.c 			   entry, index, false, 0, mm->vgpu);
vgpu              602 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
vgpu              608 drivers/gpu/drm/i915/gvt/gtt.c 			   false, 0, mm->vgpu);
vgpu              614 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
vgpu              619 drivers/gpu/drm/i915/gvt/gtt.c 			   false, 0, mm->vgpu);
vgpu              625 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
vgpu              629 drivers/gpu/drm/i915/gvt/gtt.c 	pte_ops->get_entry(NULL, entry, index, false, 0, mm->vgpu);
vgpu              635 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops;
vgpu              639 drivers/gpu/drm/i915/gvt/gtt.c 	pte_ops->set_entry(NULL, entry, index, false, 0, mm->vgpu);
vgpu              651 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = spt->vgpu->gvt;
vgpu              662 drivers/gpu/drm/i915/gvt/gtt.c 			spt->vgpu);
vgpu              680 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = spt->vgpu->gvt;
vgpu              691 drivers/gpu/drm/i915/gvt/gtt.c 			spt->vgpu);
vgpu              732 drivers/gpu/drm/i915/gvt/gtt.c static int detach_oos_page(struct intel_vgpu *vgpu,
vgpu              737 drivers/gpu/drm/i915/gvt/gtt.c 	struct device *kdev = &spt->vgpu->gvt->dev_priv->drm.pdev->dev;
vgpu              739 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_free(spt->vgpu->id, spt, spt->guest_page.type);
vgpu              744 drivers/gpu/drm/i915/gvt/gtt.c 	radix_tree_delete(&spt->vgpu->gtt.spt_tree, spt->shadow_page.mfn);
vgpu              748 drivers/gpu/drm/i915/gvt/gtt.c 			detach_oos_page(spt->vgpu, spt->guest_page.oos_page);
vgpu              750 drivers/gpu/drm/i915/gvt/gtt.c 		intel_vgpu_unregister_page_track(spt->vgpu, spt->guest_page.gfn);
vgpu              757 drivers/gpu/drm/i915/gvt/gtt.c static void ppgtt_free_all_spt(struct intel_vgpu *vgpu)
vgpu              765 drivers/gpu/drm/i915/gvt/gtt.c 	radix_tree_for_each_slot(slot, &vgpu->gtt.spt_tree, &iter, 0) {
vgpu              798 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              802 drivers/gpu/drm/i915/gvt/gtt.c 	track = intel_vgpu_find_page_track(vgpu, gfn);
vgpu              811 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu, unsigned long mfn)
vgpu              813 drivers/gpu/drm/i915/gvt/gtt.c 	return radix_tree_lookup(&vgpu->gtt.spt_tree, mfn);
vgpu              820 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu, enum intel_gvt_gtt_type type)
vgpu              822 drivers/gpu/drm/i915/gvt/gtt.c 	struct device *kdev = &vgpu->gvt->dev_priv->drm.pdev->dev;
vgpu              830 drivers/gpu/drm/i915/gvt/gtt.c 		if (reclaim_one_ppgtt_mm(vgpu->gvt))
vgpu              837 drivers/gpu/drm/i915/gvt/gtt.c 	spt->vgpu = vgpu;
vgpu              855 drivers/gpu/drm/i915/gvt/gtt.c 	ret = radix_tree_insert(&vgpu->gtt.spt_tree, spt->shadow_page.mfn, spt);
vgpu              870 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu, enum intel_gvt_gtt_type type,
vgpu              876 drivers/gpu/drm/i915/gvt/gtt.c 	spt = ppgtt_alloc_spt(vgpu, type);
vgpu              883 drivers/gpu/drm/i915/gvt/gtt.c 	ret = intel_vgpu_register_page_track(vgpu, gfn,
vgpu              894 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_alloc(vgpu->id, spt, type, spt->shadow_page.mfn, gfn);
vgpu              900 drivers/gpu/drm/i915/gvt/gtt.c 	((spt)->vgpu->gvt->device_info.gtt_entry_size_shift)
vgpu              909 drivers/gpu/drm/i915/gvt/gtt.c 		    spt->vgpu->gvt->gtt.pte_ops->test_present(e))
vgpu              915 drivers/gpu/drm/i915/gvt/gtt.c 		    spt->vgpu->gvt->gtt.pte_ops->test_present(e))
vgpu              926 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_refcount(spt->vgpu->id, "inc", spt, v, (v + 1));
vgpu              934 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_refcount(spt->vgpu->id, "dec", spt, v, (v - 1));
vgpu              940 drivers/gpu/drm/i915/gvt/gtt.c static int ppgtt_invalidate_spt_by_shadow_entry(struct intel_vgpu *vgpu,
vgpu              943 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu              962 drivers/gpu/drm/i915/gvt/gtt.c 			vgpu->gtt.scratch_pt[cur_pt_type].page_mfn)
vgpu              965 drivers/gpu/drm/i915/gvt/gtt.c 	s = intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(e));
vgpu              977 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu              978 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu              986 drivers/gpu/drm/i915/gvt/gtt.c 	if (!pfn || pfn == vgpu->gtt.scratch_pt[type].page_mfn)
vgpu              989 drivers/gpu/drm/i915/gvt/gtt.c 	intel_gvt_hypervisor_dma_unmap_guest_page(vgpu, pfn << PAGE_SHIFT);
vgpu              994 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu              999 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_change(spt->vgpu->id, "die", spt,
vgpu             1026 drivers/gpu/drm/i915/gvt/gtt.c 					spt->vgpu, &e);
vgpu             1035 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_change(spt->vgpu->id, "release", spt,
vgpu             1045 drivers/gpu/drm/i915/gvt/gtt.c static bool vgpu_ips_enabled(struct intel_vgpu *vgpu)
vgpu             1047 drivers/gpu/drm/i915/gvt/gtt.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu             1050 drivers/gpu/drm/i915/gvt/gtt.c 		u32 ips = vgpu_vreg_t(vgpu, GEN8_GAMW_ECO_DEV_RW_IA) &
vgpu             1064 drivers/gpu/drm/i915/gvt/gtt.c 		struct intel_vgpu *vgpu, struct intel_gvt_gtt_entry *we)
vgpu             1066 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1074 drivers/gpu/drm/i915/gvt/gtt.c 		ips = vgpu_ips_enabled(vgpu) && ops->test_ips(we);
vgpu             1076 drivers/gpu/drm/i915/gvt/gtt.c 	spt = intel_vgpu_find_spt_by_gfn(vgpu, ops->get_pfn(we));
vgpu             1099 drivers/gpu/drm/i915/gvt/gtt.c 		spt = ppgtt_alloc_spt_gfn(vgpu, type, ops->get_pfn(we), ips);
vgpu             1105 drivers/gpu/drm/i915/gvt/gtt.c 		ret = intel_vgpu_enable_page_track(vgpu, spt->guest_page.gfn);
vgpu             1113 drivers/gpu/drm/i915/gvt/gtt.c 		trace_spt_change(vgpu->id, "new", spt, spt->guest_page.gfn,
vgpu             1130 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = s->vgpu->gvt->gtt.pte_ops;
vgpu             1150 drivers/gpu/drm/i915/gvt/gtt.c static int is_2MB_gtt_possible(struct intel_vgpu *vgpu,
vgpu             1153 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1156 drivers/gpu/drm/i915/gvt/gtt.c 	if (!HAS_PAGE_SIZES(vgpu->gvt->dev_priv, I915_GTT_PAGE_SIZE_2M))
vgpu             1159 drivers/gpu/drm/i915/gvt/gtt.c 	pfn = intel_gvt_hypervisor_gfn_to_mfn(vgpu, ops->get_pfn(entry));
vgpu             1166 drivers/gpu/drm/i915/gvt/gtt.c static int split_2MB_gtt_entry(struct intel_vgpu *vgpu,
vgpu             1170 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1182 drivers/gpu/drm/i915/gvt/gtt.c 	sub_spt = ppgtt_alloc_spt(vgpu, GTT_TYPE_PPGTT_PTE_PT);
vgpu             1187 drivers/gpu/drm/i915/gvt/gtt.c 		ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu,
vgpu             1213 drivers/gpu/drm/i915/gvt/gtt.c static int split_64KB_gtt_entry(struct intel_vgpu *vgpu,
vgpu             1217 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1233 drivers/gpu/drm/i915/gvt/gtt.c 		ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu,
vgpu             1244 drivers/gpu/drm/i915/gvt/gtt.c static int ppgtt_populate_shadow_entry(struct intel_vgpu *vgpu,
vgpu             1248 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops;
vgpu             1270 drivers/gpu/drm/i915/gvt/gtt.c 		return split_64KB_gtt_entry(vgpu, spt, index, &se);
vgpu             1273 drivers/gpu/drm/i915/gvt/gtt.c 		ret = is_2MB_gtt_possible(vgpu, ge);
vgpu             1275 drivers/gpu/drm/i915/gvt/gtt.c 			return split_2MB_gtt_entry(vgpu, spt, index, &se);
vgpu             1288 drivers/gpu/drm/i915/gvt/gtt.c 	ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu, gfn, page_size,
vgpu             1300 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu             1301 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1308 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_change(spt->vgpu->id, "born", spt,
vgpu             1313 drivers/gpu/drm/i915/gvt/gtt.c 			s = ppgtt_populate_spt_by_guest_entry(vgpu, &ge);
vgpu             1323 drivers/gpu/drm/i915/gvt/gtt.c 			if (!intel_gvt_hypervisor_is_valid_gfn(vgpu, gfn)) {
vgpu             1329 drivers/gpu/drm/i915/gvt/gtt.c 			ret = ppgtt_populate_shadow_entry(vgpu, spt, i, &ge);
vgpu             1344 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu             1345 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1348 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_guest_change(spt->vgpu->id, "remove", spt,
vgpu             1358 drivers/gpu/drm/i915/gvt/gtt.c 	    vgpu->gtt.scratch_pt[spt->shadow_page.type].page_mfn)
vgpu             1363 drivers/gpu/drm/i915/gvt/gtt.c 			intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(se));
vgpu             1389 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu             1394 drivers/gpu/drm/i915/gvt/gtt.c 	trace_spt_guest_change(spt->vgpu->id, "add", spt, spt->shadow_page.type,
vgpu             1401 drivers/gpu/drm/i915/gvt/gtt.c 		s = ppgtt_populate_spt_by_guest_entry(vgpu, we);
vgpu             1410 drivers/gpu/drm/i915/gvt/gtt.c 		ret = ppgtt_populate_shadow_entry(vgpu, spt, index, we);
vgpu             1421 drivers/gpu/drm/i915/gvt/gtt.c static int sync_oos_page(struct intel_vgpu *vgpu,
vgpu             1424 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu             1425 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1432 drivers/gpu/drm/i915/gvt/gtt.c 	trace_oos_change(vgpu->id, "sync", oos_page->id,
vgpu             1440 drivers/gpu/drm/i915/gvt/gtt.c 		ops->get_entry(oos_page->mem, &old, index, false, 0, vgpu);
vgpu             1442 drivers/gpu/drm/i915/gvt/gtt.c 			       spt->guest_page.gfn << PAGE_SHIFT, vgpu);
vgpu             1448 drivers/gpu/drm/i915/gvt/gtt.c 		trace_oos_sync(vgpu->id, oos_page->id,
vgpu             1452 drivers/gpu/drm/i915/gvt/gtt.c 		ret = ppgtt_populate_shadow_entry(vgpu, spt, index, &new);
vgpu             1456 drivers/gpu/drm/i915/gvt/gtt.c 		ops->set_entry(oos_page->mem, &new, index, false, 0, vgpu);
vgpu             1464 drivers/gpu/drm/i915/gvt/gtt.c static int detach_oos_page(struct intel_vgpu *vgpu,
vgpu             1467 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1470 drivers/gpu/drm/i915/gvt/gtt.c 	trace_oos_change(vgpu->id, "detach", oos_page->id,
vgpu             1486 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = spt->vgpu->gvt;
vgpu             1489 drivers/gpu/drm/i915/gvt/gtt.c 	ret = intel_gvt_hypervisor_read_gpa(spt->vgpu,
vgpu             1500 drivers/gpu/drm/i915/gvt/gtt.c 	trace_oos_change(spt->vgpu->id, "attach", oos_page->id,
vgpu             1510 drivers/gpu/drm/i915/gvt/gtt.c 	ret = intel_vgpu_enable_page_track(spt->vgpu, spt->guest_page.gfn);
vgpu             1514 drivers/gpu/drm/i915/gvt/gtt.c 	trace_oos_change(spt->vgpu->id, "set page sync", oos_page->id,
vgpu             1518 drivers/gpu/drm/i915/gvt/gtt.c 	return sync_oos_page(spt->vgpu, oos_page);
vgpu             1523 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = spt->vgpu->gvt;
vgpu             1536 drivers/gpu/drm/i915/gvt/gtt.c 		ret = detach_oos_page(spt->vgpu, oos_page);
vgpu             1552 drivers/gpu/drm/i915/gvt/gtt.c 	trace_oos_change(spt->vgpu->id, "set page out of sync", oos_page->id,
vgpu             1555 drivers/gpu/drm/i915/gvt/gtt.c 	list_add_tail(&oos_page->vm_list, &spt->vgpu->gtt.oos_page_list_head);
vgpu             1556 drivers/gpu/drm/i915/gvt/gtt.c 	return intel_vgpu_disable_page_track(spt->vgpu, spt->guest_page.gfn);
vgpu             1569 drivers/gpu/drm/i915/gvt/gtt.c int intel_vgpu_sync_oos_pages(struct intel_vgpu *vgpu)
vgpu             1578 drivers/gpu/drm/i915/gvt/gtt.c 	list_for_each_safe(pos, n, &vgpu->gtt.oos_page_list_head) {
vgpu             1595 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu             1597 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1629 drivers/gpu/drm/i915/gvt/gtt.c 					vgpu->gtt.scratch_pt[type].page_mfn);
vgpu             1636 drivers/gpu/drm/i915/gvt/gtt.c 				     vgpu->gtt.scratch_pt[type].page_mfn);
vgpu             1640 drivers/gpu/drm/i915/gvt/gtt.c 				     vgpu->gtt.scratch_pt[type].page_mfn);
vgpu             1669 drivers/gpu/drm/i915/gvt/gtt.c 			&spt->vgpu->gtt.post_shadow_list_head);
vgpu             1682 drivers/gpu/drm/i915/gvt/gtt.c int intel_vgpu_flush_post_shadow(struct intel_vgpu *vgpu)
vgpu             1690 drivers/gpu/drm/i915/gvt/gtt.c 	list_for_each_safe(pos, n, &vgpu->gtt.post_shadow_list_head) {
vgpu             1713 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = spt->vgpu;
vgpu             1714 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             1715 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu             1748 drivers/gpu/drm/i915/gvt/gtt.c 			ops->set_pfn(&se, vgpu->gtt.scratch_pt[type].page_mfn);
vgpu             1761 drivers/gpu/drm/i915/gvt/gtt.c 				false, 0, vgpu);
vgpu             1776 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = mm->vgpu;
vgpu             1777 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1792 drivers/gpu/drm/i915/gvt/gtt.c 		ppgtt_invalidate_spt_by_shadow_entry(vgpu, &se);
vgpu             1796 drivers/gpu/drm/i915/gvt/gtt.c 		trace_spt_guest_change(vgpu->id, "destroy root pointer",
vgpu             1806 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = mm->vgpu;
vgpu             1807 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1825 drivers/gpu/drm/i915/gvt/gtt.c 		trace_spt_guest_change(vgpu->id, __func__, NULL,
vgpu             1828 drivers/gpu/drm/i915/gvt/gtt.c 		spt = ppgtt_populate_spt_by_guest_entry(vgpu, &ge);
vgpu             1837 drivers/gpu/drm/i915/gvt/gtt.c 		trace_spt_guest_change(vgpu->id, "populate root pointer",
vgpu             1847 drivers/gpu/drm/i915/gvt/gtt.c static struct intel_vgpu_mm *vgpu_alloc_mm(struct intel_vgpu *vgpu)
vgpu             1855 drivers/gpu/drm/i915/gvt/gtt.c 	mm->vgpu = vgpu;
vgpu             1878 drivers/gpu/drm/i915/gvt/gtt.c struct intel_vgpu_mm *intel_vgpu_create_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu             1881 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1885 drivers/gpu/drm/i915/gvt/gtt.c 	mm = vgpu_alloc_mm(vgpu);
vgpu             1911 drivers/gpu/drm/i915/gvt/gtt.c 	list_add_tail(&mm->ppgtt_mm.list, &vgpu->gtt.ppgtt_mm_list_head);
vgpu             1920 drivers/gpu/drm/i915/gvt/gtt.c static struct intel_vgpu_mm *intel_vgpu_create_ggtt_mm(struct intel_vgpu *vgpu)
vgpu             1925 drivers/gpu/drm/i915/gvt/gtt.c 	mm = vgpu_alloc_mm(vgpu);
vgpu             1931 drivers/gpu/drm/i915/gvt/gtt.c 	nr_entries = gvt_ggtt_gm_sz(vgpu->gvt) >> I915_GTT_PAGE_SHIFT;
vgpu             1934 drivers/gpu/drm/i915/gvt/gtt.c 				   vgpu->gvt->device_info.gtt_entry_size));
vgpu             1960 drivers/gpu/drm/i915/gvt/gtt.c 		mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
vgpu             1962 drivers/gpu/drm/i915/gvt/gtt.c 		mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
vgpu             2005 drivers/gpu/drm/i915/gvt/gtt.c 		mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
vgpu             2007 drivers/gpu/drm/i915/gvt/gtt.c 			       &mm->vgpu->gvt->gtt.ppgtt_mm_lru_list_head);
vgpu             2008 drivers/gpu/drm/i915/gvt/gtt.c 		mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock);
vgpu             2042 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = mm->vgpu;
vgpu             2043 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             2046 drivers/gpu/drm/i915/gvt/gtt.c 	s = intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(e));
vgpu             2070 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu *vgpu = mm->vgpu;
vgpu             2071 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             2084 drivers/gpu/drm/i915/gvt/gtt.c 		if (!vgpu_gmadr_is_valid(vgpu, gma))
vgpu             2093 drivers/gpu/drm/i915/gvt/gtt.c 		trace_gma_translate(vgpu->id, "ggtt", 0, 0, gma, gpa);
vgpu             2132 drivers/gpu/drm/i915/gvt/gtt.c 		trace_gma_translate(vgpu->id, "ppgtt", 0,
vgpu             2142 drivers/gpu/drm/i915/gvt/gtt.c static int emulate_ggtt_mmio_read(struct intel_vgpu *vgpu,
vgpu             2145 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm;
vgpu             2146 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu             2155 drivers/gpu/drm/i915/gvt/gtt.c 	if (!intel_gvt_ggtt_validate_range(vgpu,
vgpu             2180 drivers/gpu/drm/i915/gvt/gtt.c int intel_vgpu_emulate_ggtt_mmio_read(struct intel_vgpu *vgpu, unsigned int off,
vgpu             2183 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu             2190 drivers/gpu/drm/i915/gvt/gtt.c 	ret = emulate_ggtt_mmio_read(vgpu, off, p_data, bytes);
vgpu             2194 drivers/gpu/drm/i915/gvt/gtt.c static void ggtt_invalidate_pte(struct intel_vgpu *vgpu,
vgpu             2197 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops;
vgpu             2201 drivers/gpu/drm/i915/gvt/gtt.c 	if (pfn != vgpu->gvt->gtt.scratch_mfn)
vgpu             2202 drivers/gpu/drm/i915/gvt/gtt.c 		intel_gvt_hypervisor_dma_unmap_guest_page(vgpu,
vgpu             2206 drivers/gpu/drm/i915/gvt/gtt.c static int emulate_ggtt_mmio_write(struct intel_vgpu *vgpu, unsigned int off,
vgpu             2209 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             2211 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm;
vgpu             2228 drivers/gpu/drm/i915/gvt/gtt.c 	if (!vgpu_gmadr_is_valid(vgpu, gma))
vgpu             2289 drivers/gpu/drm/i915/gvt/gtt.c 		if (!intel_gvt_hypervisor_is_valid_gfn(vgpu, gfn)) {
vgpu             2294 drivers/gpu/drm/i915/gvt/gtt.c 		ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu, gfn,
vgpu             2314 drivers/gpu/drm/i915/gvt/gtt.c 	ggtt_invalidate_pte(vgpu, &e);
vgpu             2333 drivers/gpu/drm/i915/gvt/gtt.c int intel_vgpu_emulate_ggtt_mmio_write(struct intel_vgpu *vgpu,
vgpu             2336 drivers/gpu/drm/i915/gvt/gtt.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu             2343 drivers/gpu/drm/i915/gvt/gtt.c 	ret = emulate_ggtt_mmio_write(vgpu, off, p_data, bytes);
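
Both emulate_ggtt_mmio_* paths above are reached once an MMIO access is recognised as falling inside the GTT range of BAR0, and both validate the access against the vGPU's address ranges (intel_gvt_ggtt_validate_range()) before touching the shadow GGTT. A sketch of a caller that selects read or write, assuming the dispatch layer has already rebased 'off' to a GTT-relative offset:

static int forward_ggtt_access(struct intel_vgpu *vgpu, unsigned int off,
			       void *p_data, unsigned int bytes, bool is_write)
{
	if (is_write)
		return intel_vgpu_emulate_ggtt_mmio_write(vgpu, off,
							  p_data, bytes);
	return intel_vgpu_emulate_ggtt_mmio_read(vgpu, off, p_data, bytes);
}
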
vgpu             2347 drivers/gpu/drm/i915/gvt/gtt.c static int alloc_scratch_pages(struct intel_vgpu *vgpu,
vgpu             2350 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu_gtt *gtt = &vgpu->gtt;
vgpu             2351 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops;
vgpu             2353 drivers/gpu/drm/i915/gvt/gtt.c 				vgpu->gvt->device_info.gtt_entry_size_shift;
vgpu             2356 drivers/gpu/drm/i915/gvt/gtt.c 	struct device *dev = &vgpu->gvt->dev_priv->drm.pdev->dev;
vgpu             2379 drivers/gpu/drm/i915/gvt/gtt.c 			vgpu->id, type, gtt->scratch_pt[type].page_mfn);
vgpu             2404 drivers/gpu/drm/i915/gvt/gtt.c 			ops->set_entry(scratch_pt, &se, i, false, 0, vgpu);
vgpu             2410 drivers/gpu/drm/i915/gvt/gtt.c static int release_scratch_page_tree(struct intel_vgpu *vgpu)
vgpu             2413 drivers/gpu/drm/i915/gvt/gtt.c 	struct device *dev = &vgpu->gvt->dev_priv->drm.pdev->dev;
vgpu             2417 drivers/gpu/drm/i915/gvt/gtt.c 		if (vgpu->gtt.scratch_pt[i].page != NULL) {
vgpu             2418 drivers/gpu/drm/i915/gvt/gtt.c 			daddr = (dma_addr_t)(vgpu->gtt.scratch_pt[i].page_mfn <<
vgpu             2421 drivers/gpu/drm/i915/gvt/gtt.c 			__free_page(vgpu->gtt.scratch_pt[i].page);
vgpu             2422 drivers/gpu/drm/i915/gvt/gtt.c 			vgpu->gtt.scratch_pt[i].page = NULL;
vgpu             2423 drivers/gpu/drm/i915/gvt/gtt.c 			vgpu->gtt.scratch_pt[i].page_mfn = 0;
vgpu             2430 drivers/gpu/drm/i915/gvt/gtt.c static int create_scratch_page_tree(struct intel_vgpu *vgpu)
vgpu             2435 drivers/gpu/drm/i915/gvt/gtt.c 		ret = alloc_scratch_pages(vgpu, i);
vgpu             2443 drivers/gpu/drm/i915/gvt/gtt.c 	release_scratch_page_tree(vgpu);
vgpu             2457 drivers/gpu/drm/i915/gvt/gtt.c int intel_vgpu_init_gtt(struct intel_vgpu *vgpu)
vgpu             2459 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_vgpu_gtt *gtt = &vgpu->gtt;
vgpu             2467 drivers/gpu/drm/i915/gvt/gtt.c 	gtt->ggtt_mm = intel_vgpu_create_ggtt_mm(vgpu);
vgpu             2473 drivers/gpu/drm/i915/gvt/gtt.c 	intel_vgpu_reset_ggtt(vgpu, false);
vgpu             2477 drivers/gpu/drm/i915/gvt/gtt.c 	return create_scratch_page_tree(vgpu);
vgpu             2480 drivers/gpu/drm/i915/gvt/gtt.c static void intel_vgpu_destroy_all_ppgtt_mm(struct intel_vgpu *vgpu)
vgpu             2485 drivers/gpu/drm/i915/gvt/gtt.c 	list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) {
vgpu             2490 drivers/gpu/drm/i915/gvt/gtt.c 	if (GEM_WARN_ON(!list_empty(&vgpu->gtt.ppgtt_mm_list_head)))
vgpu             2493 drivers/gpu/drm/i915/gvt/gtt.c 	if (GEM_WARN_ON(!radix_tree_empty(&vgpu->gtt.spt_tree))) {
vgpu             2495 drivers/gpu/drm/i915/gvt/gtt.c 		ppgtt_free_all_spt(vgpu);
vgpu             2499 drivers/gpu/drm/i915/gvt/gtt.c static void intel_vgpu_destroy_ggtt_mm(struct intel_vgpu *vgpu)
vgpu             2504 drivers/gpu/drm/i915/gvt/gtt.c 				 &vgpu->gtt.ggtt_mm->ggtt_mm.partial_pte_list,
vgpu             2510 drivers/gpu/drm/i915/gvt/gtt.c 	intel_vgpu_destroy_mm(vgpu->gtt.ggtt_mm);
vgpu             2511 drivers/gpu/drm/i915/gvt/gtt.c 	vgpu->gtt.ggtt_mm = NULL;
vgpu             2524 drivers/gpu/drm/i915/gvt/gtt.c void intel_vgpu_clean_gtt(struct intel_vgpu *vgpu)
vgpu             2526 drivers/gpu/drm/i915/gvt/gtt.c 	intel_vgpu_destroy_all_ppgtt_mm(vgpu);
vgpu             2527 drivers/gpu/drm/i915/gvt/gtt.c 	intel_vgpu_destroy_ggtt_mm(vgpu);
vgpu             2528 drivers/gpu/drm/i915/gvt/gtt.c 	release_scratch_page_tree(vgpu);
vgpu             2595 drivers/gpu/drm/i915/gvt/gtt.c struct intel_vgpu_mm *intel_vgpu_find_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu             2601 drivers/gpu/drm/i915/gvt/gtt.c 	list_for_each(pos, &vgpu->gtt.ppgtt_mm_list_head) {
vgpu             2632 drivers/gpu/drm/i915/gvt/gtt.c struct intel_vgpu_mm *intel_vgpu_get_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu             2637 drivers/gpu/drm/i915/gvt/gtt.c 	mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps);
vgpu             2641 drivers/gpu/drm/i915/gvt/gtt.c 		mm = intel_vgpu_create_ppgtt_mm(vgpu, root_entry_type, pdps);
vgpu             2658 drivers/gpu/drm/i915/gvt/gtt.c int intel_vgpu_put_ppgtt_mm(struct intel_vgpu *vgpu, u64 pdps[])
vgpu             2662 drivers/gpu/drm/i915/gvt/gtt.c 	mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps);
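
The find/get/put trio above manages shadow PPGTT instances keyed by the guest's page-directory pointers: intel_vgpu_get_ppgtt_mm() reuses an existing intel_vgpu_mm for the same pdps or shadows a new one, and intel_vgpu_put_ppgtt_mm() drops it when the guest tears the address space down. A lifecycle sketch; the GTT_TYPE_PPGTT_ROOT_L4_ENTRY root-entry type (a 4-level table) and the ERR_PTR error convention are assumptions not shown in this listing:

static int shadow_guest_ppgtt(struct intel_vgpu *vgpu, u64 pdps[])
{
	struct intel_vgpu_mm *mm;

	/* Reuse an existing shadow PPGTT for these pdps, or create one. */
	mm = intel_vgpu_get_ppgtt_mm(vgpu, GTT_TYPE_PPGTT_ROOT_L4_ENTRY, pdps);
	if (IS_ERR(mm))		/* assumes the ERR_PTR convention */
		return PTR_ERR(mm);

	/* mm is now usable for gma->gpa walks on this guest context. */
	return 0;
}

static void unshadow_guest_ppgtt(struct intel_vgpu *vgpu, u64 pdps[])
{
	/* Drops the reference taken by intel_vgpu_get_ppgtt_mm(). */
	intel_vgpu_put_ppgtt_mm(vgpu, pdps);
}
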
vgpu             2753 drivers/gpu/drm/i915/gvt/gtt.c void intel_vgpu_invalidate_ppgtt(struct intel_vgpu *vgpu)
vgpu             2758 drivers/gpu/drm/i915/gvt/gtt.c 	list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) {
vgpu             2761 drivers/gpu/drm/i915/gvt/gtt.c 			mutex_lock(&vgpu->gvt->gtt.ppgtt_mm_lock);
vgpu             2763 drivers/gpu/drm/i915/gvt/gtt.c 			mutex_unlock(&vgpu->gvt->gtt.ppgtt_mm_lock);
vgpu             2779 drivers/gpu/drm/i915/gvt/gtt.c void intel_vgpu_reset_ggtt(struct intel_vgpu *vgpu, bool invalidate_old)
vgpu             2781 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             2783 drivers/gpu/drm/i915/gvt/gtt.c 	struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops;
vgpu             2792 drivers/gpu/drm/i915/gvt/gtt.c 	index = vgpu_aperture_gmadr_base(vgpu) >> PAGE_SHIFT;
vgpu             2793 drivers/gpu/drm/i915/gvt/gtt.c 	num_entries = vgpu_aperture_sz(vgpu) >> PAGE_SHIFT;
vgpu             2796 drivers/gpu/drm/i915/gvt/gtt.c 			ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index);
vgpu             2797 drivers/gpu/drm/i915/gvt/gtt.c 			ggtt_invalidate_pte(vgpu, &old_entry);
vgpu             2799 drivers/gpu/drm/i915/gvt/gtt.c 		ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++);
vgpu             2802 drivers/gpu/drm/i915/gvt/gtt.c 	index = vgpu_hidden_gmadr_base(vgpu) >> PAGE_SHIFT;
vgpu             2803 drivers/gpu/drm/i915/gvt/gtt.c 	num_entries = vgpu_hidden_sz(vgpu) >> PAGE_SHIFT;
vgpu             2806 drivers/gpu/drm/i915/gvt/gtt.c 			ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index);
vgpu             2807 drivers/gpu/drm/i915/gvt/gtt.c 			ggtt_invalidate_pte(vgpu, &old_entry);
vgpu             2809 drivers/gpu/drm/i915/gvt/gtt.c 		ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++);
vgpu             2823 drivers/gpu/drm/i915/gvt/gtt.c void intel_vgpu_reset_gtt(struct intel_vgpu *vgpu)
vgpu             2829 drivers/gpu/drm/i915/gvt/gtt.c 	intel_vgpu_destroy_all_ppgtt_mm(vgpu);
vgpu             2830 drivers/gpu/drm/i915/gvt/gtt.c 	intel_vgpu_reset_ggtt(vgpu, true);
vgpu               54 drivers/gpu/drm/i915/gvt/gtt.h 			 struct intel_vgpu *vgpu);
vgpu               60 drivers/gpu/drm/i915/gvt/gtt.h 			 struct intel_vgpu *vgpu);
vgpu              144 drivers/gpu/drm/i915/gvt/gtt.h 	struct intel_vgpu *vgpu;
vgpu              171 drivers/gpu/drm/i915/gvt/gtt.h struct intel_vgpu_mm *intel_vgpu_create_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu              208 drivers/gpu/drm/i915/gvt/gtt.h int intel_vgpu_init_gtt(struct intel_vgpu *vgpu);
vgpu              209 drivers/gpu/drm/i915/gvt/gtt.h void intel_vgpu_clean_gtt(struct intel_vgpu *vgpu);
vgpu              210 drivers/gpu/drm/i915/gvt/gtt.h void intel_vgpu_reset_ggtt(struct intel_vgpu *vgpu, bool invalidate_old);
vgpu              211 drivers/gpu/drm/i915/gvt/gtt.h void intel_vgpu_invalidate_ppgtt(struct intel_vgpu *vgpu);
vgpu              214 drivers/gpu/drm/i915/gvt/gtt.h void intel_vgpu_reset_gtt(struct intel_vgpu *vgpu);
vgpu              217 drivers/gpu/drm/i915/gvt/gtt.h struct intel_vgpu_mm *intel_gvt_find_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu              234 drivers/gpu/drm/i915/gvt/gtt.h 	struct intel_vgpu *vgpu;
vgpu              256 drivers/gpu/drm/i915/gvt/gtt.h int intel_vgpu_sync_oos_pages(struct intel_vgpu *vgpu);
vgpu              258 drivers/gpu/drm/i915/gvt/gtt.h int intel_vgpu_flush_post_shadow(struct intel_vgpu *vgpu);
vgpu              267 drivers/gpu/drm/i915/gvt/gtt.h struct intel_vgpu_mm *intel_vgpu_find_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu              270 drivers/gpu/drm/i915/gvt/gtt.h struct intel_vgpu_mm *intel_vgpu_get_ppgtt_mm(struct intel_vgpu *vgpu,
vgpu              273 drivers/gpu/drm/i915/gvt/gtt.h int intel_vgpu_put_ppgtt_mm(struct intel_vgpu *vgpu, u64 pdps[]);
vgpu              275 drivers/gpu/drm/i915/gvt/gtt.h int intel_vgpu_emulate_ggtt_mmio_read(struct intel_vgpu *vgpu,
vgpu              278 drivers/gpu/drm/i915/gvt/gtt.h int intel_vgpu_emulate_ggtt_mmio_write(struct intel_vgpu *vgpu,
vgpu              306 drivers/gpu/drm/i915/gvt/gvt.c 	struct intel_vgpu *vgpu;
vgpu              370 drivers/gpu/drm/i915/gvt/gvt.c 	vgpu = intel_gvt_create_idle_vgpu(gvt);
vgpu              371 drivers/gpu/drm/i915/gvt/gvt.c 	if (IS_ERR(vgpu)) {
vgpu              372 drivers/gpu/drm/i915/gvt/gvt.c 		ret = PTR_ERR(vgpu);
vgpu              376 drivers/gpu/drm/i915/gvt/gvt.c 	gvt->idle_vgpu = vgpu;
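
The gvt.c fragment above creates a permanent idle vGPU during initialisation and stores it in gvt->idle_vgpu, using the kernel's ERR_PTR convention for failure. A sketch of that setup together with the matching teardown via intel_gvt_destroy_idle_vgpu() (declared in the gvt.h lines further down); clearing the pointer afterwards is defensive and not taken from the listing:

static int setup_idle_vgpu(struct intel_gvt *gvt)
{
	struct intel_vgpu *vgpu;

	vgpu = intel_gvt_create_idle_vgpu(gvt);
	if (IS_ERR(vgpu))
		return PTR_ERR(vgpu);

	gvt->idle_vgpu = vgpu;
	return 0;
}

static void teardown_idle_vgpu(struct intel_gvt *gvt)
{
	intel_gvt_destroy_idle_vgpu(gvt->idle_vgpu);
	gvt->idle_vgpu = NULL;	/* defensive; not shown in the listing */
}
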
vgpu              111 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space)
vgpu              125 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_opregion(vgpu) (&(vgpu->opregion))
vgpu              144 drivers/gpu/drm/i915/gvt/gvt.h 	int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
vgpu              145 drivers/gpu/drm/i915/gvt/gvt.h 	void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
vgpu              146 drivers/gpu/drm/i915/gvt/gvt.h 	void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
vgpu              400 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_aperture_offset(vgpu)	((vgpu)->gm.low_gm_node.start)
vgpu              401 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_hidden_offset(vgpu)	((vgpu)->gm.high_gm_node.start)
vgpu              402 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_aperture_sz(vgpu)		((vgpu)->gm.aperture_sz)
vgpu              403 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_hidden_sz(vgpu)		((vgpu)->gm.hidden_sz)
vgpu              405 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_aperture_pa_base(vgpu) \
vgpu              406 drivers/gpu/drm/i915/gvt/gvt.h 	(gvt_aperture_pa_base(vgpu->gvt) + vgpu_aperture_offset(vgpu))
vgpu              408 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_ggtt_gm_sz(vgpu) ((vgpu)->gm.aperture_sz + (vgpu)->gm.hidden_sz)
vgpu              410 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_aperture_pa_end(vgpu) \
vgpu              411 drivers/gpu/drm/i915/gvt/gvt.h 	(vgpu_aperture_pa_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
vgpu              413 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_aperture_gmadr_base(vgpu) (vgpu_aperture_offset(vgpu))
vgpu              414 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_aperture_gmadr_end(vgpu) \
vgpu              415 drivers/gpu/drm/i915/gvt/gvt.h 	(vgpu_aperture_gmadr_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
vgpu              417 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_hidden_gmadr_base(vgpu) (vgpu_hidden_offset(vgpu))
vgpu              418 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_hidden_gmadr_end(vgpu) \
vgpu              419 drivers/gpu/drm/i915/gvt/gvt.h 	(vgpu_hidden_gmadr_base(vgpu) + vgpu_hidden_sz(vgpu) - 1)
vgpu              421 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_fence_base(vgpu) (vgpu->fence.base)
vgpu              422 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_fence_sz(vgpu) (vgpu->fence.size)
vgpu              436 drivers/gpu/drm/i915/gvt/gvt.h int intel_vgpu_alloc_resource(struct intel_vgpu *vgpu,
vgpu              438 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_reset_resource(struct intel_vgpu *vgpu);
vgpu              439 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_free_resource(struct intel_vgpu *vgpu);
vgpu              440 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_write_fence(struct intel_vgpu *vgpu,
vgpu              445 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_vreg_t(vgpu, reg) \
vgpu              446 drivers/gpu/drm/i915/gvt/gvt.h 	(*(u32 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
vgpu              447 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_vreg(vgpu, offset) \
vgpu              448 drivers/gpu/drm/i915/gvt/gvt.h 	(*(u32 *)(vgpu->mmio.vreg + (offset)))
vgpu              449 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_vreg64_t(vgpu, reg) \
vgpu              450 drivers/gpu/drm/i915/gvt/gvt.h 	(*(u64 *)(vgpu->mmio.vreg + i915_mmio_reg_offset(reg)))
vgpu              451 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_vreg64(vgpu, offset) \
vgpu              452 drivers/gpu/drm/i915/gvt/gvt.h 	(*(u64 *)(vgpu->mmio.vreg + (offset)))
vgpu              454 drivers/gpu/drm/i915/gvt/gvt.h #define for_each_active_vgpu(gvt, vgpu, id) \
vgpu              455 drivers/gpu/drm/i915/gvt/gvt.h 	idr_for_each_entry((&(gvt)->vgpu_idr), (vgpu), (id)) \
vgpu              456 drivers/gpu/drm/i915/gvt/gvt.h 		for_each_if(vgpu->active)
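
The macros above are the two idioms that recur throughout this listing: vgpu_vreg()/vgpu_vreg_t() give byte-offset and typed-register views into the per-vGPU virtual register file (with 64-bit variants), and for_each_active_vgpu() walks the idr of active instances. A small usage sketch; PCH_PP_STATUS is chosen only because it already appears in the handlers.c fragments later in this listing, and the raw offset-0 read is purely illustrative:

static void sample_vreg_usage(struct intel_gvt *gvt)
{
	struct intel_vgpu *vgpu;
	int id;

	for_each_active_vgpu(gvt, vgpu, id) {
		/* Typed access through an i915 register definition ... */
		u32 pp = vgpu_vreg_t(vgpu, PCH_PP_STATUS);
		/* ... and raw access through a byte offset into vreg. */
		u32 reg0 = vgpu_vreg(vgpu, 0);

		gvt_dbg_core("vgpu%d: PCH_PP_STATUS=%08x reg0=%08x\n",
			     vgpu->id, pp, reg0);
	}
}
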
vgpu              458 drivers/gpu/drm/i915/gvt/gvt.h static inline void intel_vgpu_write_pci_bar(struct intel_vgpu *vgpu,
vgpu              465 drivers/gpu/drm/i915/gvt/gvt.h 	pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
vgpu              482 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_destroy_idle_vgpu(struct intel_vgpu *vgpu);
vgpu              485 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_destroy_vgpu(struct intel_vgpu *vgpu);
vgpu              486 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_release_vgpu(struct intel_vgpu *vgpu);
vgpu              487 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_reset_vgpu_locked(struct intel_vgpu *vgpu, bool dmlr,
vgpu              489 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_reset_vgpu(struct intel_vgpu *vgpu);
vgpu              490 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_activate_vgpu(struct intel_vgpu *vgpu);
vgpu              491 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_deactivate_vgpu(struct intel_vgpu *vgpu);
vgpu              494 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_gmadr_is_aperture(vgpu, gmadr) \
vgpu              495 drivers/gpu/drm/i915/gvt/gvt.h 	((gmadr >= vgpu_aperture_gmadr_base(vgpu)) && \
vgpu              496 drivers/gpu/drm/i915/gvt/gvt.h 	 (gmadr <= vgpu_aperture_gmadr_end(vgpu)))
vgpu              498 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_gmadr_is_hidden(vgpu, gmadr) \
vgpu              499 drivers/gpu/drm/i915/gvt/gvt.h 	((gmadr >= vgpu_hidden_gmadr_base(vgpu)) && \
vgpu              500 drivers/gpu/drm/i915/gvt/gvt.h 	 (gmadr <= vgpu_hidden_gmadr_end(vgpu)))
vgpu              502 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_gmadr_is_valid(vgpu, gmadr) \
vgpu              503 drivers/gpu/drm/i915/gvt/gvt.h 	 ((vgpu_gmadr_is_aperture(vgpu, gmadr) || \
vgpu              504 drivers/gpu/drm/i915/gvt/gvt.h 	  (vgpu_gmadr_is_hidden(vgpu, gmadr))))
vgpu              518 drivers/gpu/drm/i915/gvt/gvt.h bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size);
vgpu              519 drivers/gpu/drm/i915/gvt/gvt.h int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr);
vgpu              520 drivers/gpu/drm/i915/gvt/gvt.h int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr);
vgpu              521 drivers/gpu/drm/i915/gvt/gvt.h int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index,
vgpu              523 drivers/gpu/drm/i915/gvt/gvt.h int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index,
vgpu              526 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
vgpu              528 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu);
vgpu              530 drivers/gpu/drm/i915/gvt/gvt.h int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              533 drivers/gpu/drm/i915/gvt/gvt.h int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              536 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_emulate_hotplug(struct intel_vgpu *vgpu, bool connected);
vgpu              538 drivers/gpu/drm/i915/gvt/gvt.h static inline u64 intel_vgpu_get_bar_gpa(struct intel_vgpu *vgpu, int bar)
vgpu              541 drivers/gpu/drm/i915/gvt/gvt.h 	return (*(u64 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
vgpu              545 drivers/gpu/drm/i915/gvt/gvt.h void intel_vgpu_clean_opregion(struct intel_vgpu *vgpu);
vgpu              546 drivers/gpu/drm/i915/gvt/gvt.h int intel_vgpu_init_opregion(struct intel_vgpu *vgpu);
vgpu              547 drivers/gpu/drm/i915/gvt/gvt.h int intel_vgpu_opregion_base_write_handler(struct intel_vgpu *vgpu, u32 gpa);
vgpu              549 drivers/gpu/drm/i915/gvt/gvt.h int intel_vgpu_emulate_opregion_request(struct intel_vgpu *vgpu, u32 swsci);
vgpu              550 drivers/gpu/drm/i915/gvt/gvt.h void populate_pvinfo_page(struct intel_vgpu *vgpu);
vgpu              553 drivers/gpu/drm/i915/gvt/gvt.h void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason);
vgpu              566 drivers/gpu/drm/i915/gvt/gvt.h 	void (*vgpu_destroy)(struct intel_vgpu *vgpu);
vgpu              567 drivers/gpu/drm/i915/gvt/gvt.h 	void (*vgpu_release)(struct intel_vgpu *vgpu);
vgpu              575 drivers/gpu/drm/i915/gvt/gvt.h 	int (*vgpu_query_plane)(struct intel_vgpu *vgpu, void *);
vgpu              576 drivers/gpu/drm/i915/gvt/gvt.h 	int (*vgpu_get_dmabuf)(struct intel_vgpu *vgpu, unsigned int);
vgpu              579 drivers/gpu/drm/i915/gvt/gvt.h 	void (*emulate_hotplug)(struct intel_vgpu *vgpu, bool connected);
vgpu              689 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu);
vgpu              690 drivers/gpu/drm/i915/gvt/gvt.h void intel_gvt_debugfs_remove_vgpu(struct intel_vgpu *vgpu);
vgpu               72 drivers/gpu/drm/i915/gvt/handlers.c static void read_vreg(struct intel_vgpu *vgpu, unsigned int offset,
vgpu               75 drivers/gpu/drm/i915/gvt/handlers.c 	memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes);
vgpu               78 drivers/gpu/drm/i915/gvt/handlers.c static void write_vreg(struct intel_vgpu *vgpu, unsigned int offset,
vgpu               81 drivers/gpu/drm/i915/gvt/handlers.c 	memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes);
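
read_vreg()/write_vreg() above are the building blocks for every handler in handlers.c: a default handler is just a memcpy into or out of the virtual register file, while specialised handlers call write_vreg() first and then patch status bits, as the transconf/lcpll/pipeconf fragments below show. A minimal default read/write pair in that style (the driver's own intel_vgpu_default_mmio_write(), referenced further down, plays this role):

static int default_read_sketch(struct intel_vgpu *vgpu, unsigned int offset,
			       void *p_data, unsigned int bytes)
{
	/* No side effects: the access only touches the virtual registers. */
	read_vreg(vgpu, offset, p_data, bytes);
	return 0;
}

static int default_write_sketch(struct intel_vgpu *vgpu, unsigned int offset,
				void *p_data, unsigned int bytes)
{
	write_vreg(vgpu, offset, p_data, bytes);
	return 0;
}
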
vgpu              173 drivers/gpu/drm/i915/gvt/handlers.c void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason)
vgpu              188 drivers/gpu/drm/i915/gvt/handlers.c 	pr_err("Now vgpu %d will enter failsafe mode.\n", vgpu->id);
vgpu              189 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu->failsafe = true;
vgpu              192 drivers/gpu/drm/i915/gvt/handlers.c static int sanitize_fence_mmio_access(struct intel_vgpu *vgpu,
vgpu              195 drivers/gpu/drm/i915/gvt/handlers.c 	unsigned int max_fence = vgpu_fence_sz(vgpu);
vgpu              205 drivers/gpu/drm/i915/gvt/handlers.c 		if (!vgpu->pv_notified)
vgpu              206 drivers/gpu/drm/i915/gvt/handlers.c 			enter_failsafe_mode(vgpu,
vgpu              215 drivers/gpu/drm/i915/gvt/handlers.c static int gamw_echo_dev_rw_ia_write(struct intel_vgpu *vgpu,
vgpu              220 drivers/gpu/drm/i915/gvt/handlers.c 	if (INTEL_GEN(vgpu->gvt->dev_priv) <= 10) {
vgpu              222 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_core("vgpu%d: ips enabled\n", vgpu->id);
vgpu              224 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_core("vgpu%d: ips disabled\n", vgpu->id);
vgpu              236 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              240 drivers/gpu/drm/i915/gvt/handlers.c static int fence_mmio_read(struct intel_vgpu *vgpu, unsigned int off,
vgpu              245 drivers/gpu/drm/i915/gvt/handlers.c 	ret = sanitize_fence_mmio_access(vgpu, offset_to_fence_num(off),
vgpu              249 drivers/gpu/drm/i915/gvt/handlers.c 	read_vreg(vgpu, off, p_data, bytes);
vgpu              253 drivers/gpu/drm/i915/gvt/handlers.c static int fence_mmio_write(struct intel_vgpu *vgpu, unsigned int off,
vgpu              256 drivers/gpu/drm/i915/gvt/handlers.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              260 drivers/gpu/drm/i915/gvt/handlers.c 	ret = sanitize_fence_mmio_access(vgpu, fence_num, p_data, bytes);
vgpu              263 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, off, p_data, bytes);
vgpu              266 drivers/gpu/drm/i915/gvt/handlers.c 	intel_vgpu_write_fence(vgpu, fence_num,
vgpu              267 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg64(vgpu, fence_num_to_offset(fence_num)));
vgpu              277 drivers/gpu/drm/i915/gvt/handlers.c static int mul_force_wake_write(struct intel_vgpu *vgpu,
vgpu              283 drivers/gpu/drm/i915/gvt/handlers.c 	old = vgpu_vreg(vgpu, offset);
vgpu              286 drivers/gpu/drm/i915/gvt/handlers.c 	if (INTEL_GEN(vgpu->gvt->dev_priv)  >=  9) {
vgpu              306 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = new;
vgpu              307 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, ack_reg_offset) = (new & GENMASK(15, 0));
vgpu              311 drivers/gpu/drm/i915/gvt/handlers.c static int gdrst_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              317 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              318 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu              321 drivers/gpu/drm/i915/gvt/handlers.c 		gvt_dbg_mmio("vgpu%d: request full GPU reset\n", vgpu->id);
vgpu              325 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_mmio("vgpu%d: request RCS reset\n", vgpu->id);
vgpu              329 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_mmio("vgpu%d: request VCS reset\n", vgpu->id);
vgpu              333 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_mmio("vgpu%d: request BCS Reset\n", vgpu->id);
vgpu              337 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_mmio("vgpu%d: request VECS Reset\n", vgpu->id);
vgpu              341 drivers/gpu/drm/i915/gvt/handlers.c 			gvt_dbg_mmio("vgpu%d: request VCS2 Reset\n", vgpu->id);
vgpu              344 drivers/gpu/drm/i915/gvt/handlers.c 		engine_mask &= INTEL_INFO(vgpu->gvt->dev_priv)->engine_mask;
vgpu              348 drivers/gpu/drm/i915/gvt/handlers.c 	intel_gvt_reset_vgpu_locked(vgpu, false, engine_mask);
vgpu              351 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = 0;
vgpu              356 drivers/gpu/drm/i915/gvt/handlers.c static int gmbus_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              359 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_gvt_i2c_handle_gmbus_read(vgpu, offset, p_data, bytes);
vgpu              362 drivers/gpu/drm/i915/gvt/handlers.c static int gmbus_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              365 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_gvt_i2c_handle_gmbus_write(vgpu, offset, p_data, bytes);
vgpu              368 drivers/gpu/drm/i915/gvt/handlers.c static int pch_pp_control_mmio_write(struct intel_vgpu *vgpu,
vgpu              371 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              373 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & PANEL_POWER_ON) {
vgpu              374 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_ON;
vgpu              375 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_SEQUENCE_STATE_ON_IDLE;
vgpu              376 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_SEQUENCE_POWER_DOWN;
vgpu              377 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_CYCLE_DELAY_ACTIVE;
vgpu              380 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, PCH_PP_STATUS) &=
vgpu              386 drivers/gpu/drm/i915/gvt/handlers.c static int transconf_mmio_write(struct intel_vgpu *vgpu,
vgpu              389 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              391 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & TRANS_ENABLE)
vgpu              392 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= TRANS_STATE_ENABLE;
vgpu              394 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~TRANS_STATE_ENABLE;
vgpu              398 drivers/gpu/drm/i915/gvt/handlers.c static int lcpll_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              401 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              403 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & LCPLL_PLL_DISABLE)
vgpu              404 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~LCPLL_PLL_LOCK;
vgpu              406 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= LCPLL_PLL_LOCK;
vgpu              408 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & LCPLL_CD_SOURCE_FCLK)
vgpu              409 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= LCPLL_CD_SOURCE_FCLK_DONE;
vgpu              411 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~LCPLL_CD_SOURCE_FCLK_DONE;
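
transconf_mmio_write() and lcpll_ctl_mmio_write() above follow the same virtualization idiom: there is no hardware behind the vreg, so the handler immediately reflects the requested state back into the status bits (TRANS_STATE_ENABLE, LCPLL_PLL_LOCK, LCPLL_CD_SOURCE_FCLK_DONE) so guest polling loops terminate. A small userspace sketch of the idiom, with made-up bit positions:

    /* "status follows control" sketch; CTL_* bits are hypothetical */
    #include <stdint.h>
    #include <assert.h>

    #define CTL_ENABLE   (1u << 31)
    #define CTL_STATE_ON (1u << 30)   /* read-only status bit from the guest's view */

    static void ctl_write(uint32_t *vreg, uint32_t new_val)
    {
        *vreg = new_val;
        if (*vreg & CTL_ENABLE)
            *vreg |= CTL_STATE_ON;    /* report the state the guest asked for */
        else
            *vreg &= ~CTL_STATE_ON;
    }

    int main(void)
    {
        uint32_t vreg = 0;

        ctl_write(&vreg, CTL_ENABLE);
        assert(vreg & CTL_STATE_ON);
        ctl_write(&vreg, 0);
        assert(!(vreg & CTL_STATE_ON));
        return 0;
    }
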
vgpu              416 drivers/gpu/drm/i915/gvt/handlers.c static int dpy_reg_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              424 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) = 1 << 17;
vgpu              427 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) = 0x3;
vgpu              430 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) = 0x2f << 16;
vgpu              436 drivers/gpu/drm/i915/gvt/handlers.c 	read_vreg(vgpu, offset, p_data, bytes);
vgpu              440 drivers/gpu/drm/i915/gvt/handlers.c static int pipeconf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              445 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              446 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu              449 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= I965_PIPECONF_ACTIVE;
vgpu              451 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~I965_PIPECONF_ACTIVE;
vgpu              453 drivers/gpu/drm/i915/gvt/handlers.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              454 drivers/gpu/drm/i915/gvt/handlers.c 	intel_gvt_check_vblank_emulation(vgpu->gvt);
vgpu              455 drivers/gpu/drm/i915/gvt/handlers.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              508 drivers/gpu/drm/i915/gvt/handlers.c static int force_nonpriv_write(struct intel_vgpu *vgpu,
vgpu              512 drivers/gpu/drm/i915/gvt/handlers.c 	int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
vgpu              514 drivers/gpu/drm/i915/gvt/handlers.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              519 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu->id, ring_id, offset, bytes);
vgpu              527 drivers/gpu/drm/i915/gvt/handlers.c 		ret = intel_vgpu_default_mmio_write(vgpu, offset, p_data,
vgpu              531 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu->id, reg_nonpriv, offset);
vgpu              536 drivers/gpu/drm/i915/gvt/handlers.c static int ddi_buf_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              539 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              541 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & DDI_BUF_CTL_ENABLE) {
vgpu              542 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~DDI_BUF_IS_IDLE;
vgpu              544 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= DDI_BUF_IS_IDLE;
vgpu              546 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E))
vgpu              552 drivers/gpu/drm/i915/gvt/handlers.c static int fdi_rx_iir_mmio_write(struct intel_vgpu *vgpu,
vgpu              555 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) &= ~*(u32 *)p_data;
vgpu              562 drivers/gpu/drm/i915/gvt/handlers.c static int fdi_auto_training_started(struct intel_vgpu *vgpu)
vgpu              564 drivers/gpu/drm/i915/gvt/handlers.c 	u32 ddi_buf_ctl = vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_E));
vgpu              565 drivers/gpu/drm/i915/gvt/handlers.c 	u32 rx_ctl = vgpu_vreg(vgpu, _FDI_RXA_CTL);
vgpu              566 drivers/gpu/drm/i915/gvt/handlers.c 	u32 tx_ctl = vgpu_vreg_t(vgpu, DP_TP_CTL(PORT_E));
vgpu              578 drivers/gpu/drm/i915/gvt/handlers.c static int check_fdi_rx_train_status(struct intel_vgpu *vgpu,
vgpu              607 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg_t(vgpu, fdi_rx_imr) & fdi_iir_check_bits)
vgpu              610 drivers/gpu/drm/i915/gvt/handlers.c 	if (((vgpu_vreg_t(vgpu, fdi_tx_ctl) & fdi_tx_check_bits)
vgpu              612 drivers/gpu/drm/i915/gvt/handlers.c 		&& ((vgpu_vreg_t(vgpu, fdi_rx_ctl) & fdi_rx_check_bits)
vgpu              643 drivers/gpu/drm/i915/gvt/handlers.c static int update_fdi_rx_iir_status(struct intel_vgpu *vgpu,
vgpu              661 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              665 drivers/gpu/drm/i915/gvt/handlers.c 	ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN1);
vgpu              669 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_BIT_LOCK;
vgpu              671 drivers/gpu/drm/i915/gvt/handlers.c 	ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN2);
vgpu              675 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_SYMBOL_LOCK;
vgpu              678 drivers/gpu/drm/i915/gvt/handlers.c 		if (fdi_auto_training_started(vgpu))
vgpu              679 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) |=
vgpu              687 drivers/gpu/drm/i915/gvt/handlers.c static int dp_tp_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              694 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              697 drivers/gpu/drm/i915/gvt/handlers.c 	data = (vgpu_vreg(vgpu, offset) & GENMASK(10, 8)) >> 8;
vgpu              700 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, status_reg) |= (1 << 25);
vgpu              705 drivers/gpu/drm/i915/gvt/handlers.c static int dp_tp_status_mmio_write(struct intel_vgpu *vgpu,
vgpu              714 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = (reg_val & ~sticky_mask) |
vgpu              715 drivers/gpu/drm/i915/gvt/handlers.c 		(vgpu_vreg(vgpu, offset) & sticky_mask);
vgpu              716 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) &= ~(reg_val & sticky_mask);
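
dp_tp_status_mmio_write() above treats part of DP_TP_STATUS as write-1-to-clear: ordinary bits take the value the guest wrote, while sticky bits keep their old value and are only cleared when the guest writes a 1 to acknowledge them. A compact userspace sketch of that update rule:

    /* write-1-to-clear merge, mirroring the two statements above */
    #include <stdint.h>
    #include <assert.h>

    static uint32_t w1c_write(uint32_t old, uint32_t written, uint32_t sticky_mask)
    {
        uint32_t v = (written & ~sticky_mask) | (old & sticky_mask);

        v &= ~(written & sticky_mask);   /* writing 1 clears a sticky bit */
        return v;
    }

    int main(void)
    {
        /* bit 0 is sticky and set; guest writes 1 to bit 0 and 1 to bit 4 */
        uint32_t v = w1c_write(0x1, 0x11, 0x1);

        assert(v == 0x10);   /* sticky bit acknowledged, plain bit latched */
        return 0;
    }
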
vgpu              720 drivers/gpu/drm/i915/gvt/handlers.c static int pch_adpa_mmio_write(struct intel_vgpu *vgpu,
vgpu              725 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              726 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu              729 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~ADPA_CRT_HOTPLUG_FORCE_TRIGGER;
vgpu              733 drivers/gpu/drm/i915/gvt/handlers.c static int south_chicken2_mmio_write(struct intel_vgpu *vgpu,
vgpu              738 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              739 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu              742 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= FDI_MPHY_IOSFSB_RESET_STATUS;
vgpu              744 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~FDI_MPHY_IOSFSB_RESET_STATUS;
vgpu              751 drivers/gpu/drm/i915/gvt/handlers.c static int pri_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              754 drivers/gpu/drm/i915/gvt/handlers.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              758 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              759 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg_t(vgpu, DSPSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);
vgpu              761 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(pipe))++;
vgpu              763 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg_t(vgpu, DSPCNTR(pipe)) & PLANE_CTL_ASYNC_FLIP)
vgpu              764 drivers/gpu/drm/i915/gvt/handlers.c 		intel_vgpu_trigger_virtual_event(vgpu, event);
vgpu              766 drivers/gpu/drm/i915/gvt/handlers.c 		set_bit(event, vgpu->irq.flip_done_event[pipe]);
vgpu              774 drivers/gpu/drm/i915/gvt/handlers.c static int spr_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              780 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              781 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);
vgpu              783 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg_t(vgpu, SPRCTL(pipe)) & PLANE_CTL_ASYNC_FLIP)
vgpu              784 drivers/gpu/drm/i915/gvt/handlers.c 		intel_vgpu_trigger_virtual_event(vgpu, event);
vgpu              786 drivers/gpu/drm/i915/gvt/handlers.c 		set_bit(event, vgpu->irq.flip_done_event[pipe]);
vgpu              791 drivers/gpu/drm/i915/gvt/handlers.c static int reg50080_mmio_write(struct intel_vgpu *vgpu,
vgpu              795 drivers/gpu/drm/i915/gvt/handlers.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              800 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              802 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, DSPSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);
vgpu              803 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(pipe))++;
vgpu              805 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);
vgpu              808 drivers/gpu/drm/i915/gvt/handlers.c 	if ((vgpu_vreg(vgpu, offset) & REG50080_FLIP_TYPE_MASK) == REG50080_FLIP_TYPE_ASYNC)
vgpu              809 drivers/gpu/drm/i915/gvt/handlers.c 		intel_vgpu_trigger_virtual_event(vgpu, event);
vgpu              811 drivers/gpu/drm/i915/gvt/handlers.c 		set_bit(event, vgpu->irq.flip_done_event[pipe]);
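
The three surface-register handlers above (pri_surf, spr_surf and reg50080) share one flip-completion pattern: latch the written surface address into the *SURFLIVE vreg, bump the flip counter where applicable, then either trigger the flip-done event immediately for an async flip or mark it pending so it can be delivered with the emulated vblank. A userspace sketch of that decision, with hypothetical names:

    /* flip-completion sketch; struct and function names are invented */
    #include <stdbool.h>
    #include <stdio.h>

    struct sketch_pipe {
        bool flip_pending;    /* stands in for vgpu->irq.flip_done_event */
    };

    static void raise_flip_done(int pipe)
    {
        printf("flip done event on pipe %d\n", pipe);
    }

    static void surface_write(struct sketch_pipe *p, int pipe, bool async_flip)
    {
        if (async_flip)
            raise_flip_done(pipe);     /* immediate, no vblank needed */
        else
            p->flip_pending = true;    /* delivered at the next emulated vblank */
    }

    static void emulated_vblank(struct sketch_pipe *p, int pipe)
    {
        if (p->flip_pending) {
            p->flip_pending = false;
            raise_flip_done(pipe);
        }
    }

    int main(void)
    {
        struct sketch_pipe p = { 0 };

        surface_write(&p, 0, false);
        emulated_vblank(&p, 0);
        surface_write(&p, 0, true);
        return 0;
    }
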
vgpu              816 drivers/gpu/drm/i915/gvt/handlers.c static int trigger_aux_channel_interrupt(struct intel_vgpu *vgpu,
vgpu              819 drivers/gpu/drm/i915/gvt/handlers.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              835 drivers/gpu/drm/i915/gvt/handlers.c 	intel_vgpu_trigger_virtual_event(vgpu, event);
vgpu              839 drivers/gpu/drm/i915/gvt/handlers.c static int dp_aux_ch_ctl_trans_done(struct intel_vgpu *vgpu, u32 value,
vgpu              855 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, reg) = value;
vgpu              858 drivers/gpu/drm/i915/gvt/handlers.c 		return trigger_aux_channel_interrupt(vgpu, reg);
vgpu              901 drivers/gpu/drm/i915/gvt/handlers.c static int dp_aux_ch_ctl_mmio_write(struct intel_vgpu *vgpu,
vgpu              904 drivers/gpu/drm/i915/gvt/handlers.c 	struct intel_vgpu_display *display = &vgpu->display;
vgpu              916 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu              917 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu              919 drivers/gpu/drm/i915/gvt/handlers.c 	if ((INTEL_GEN(vgpu->gvt->dev_priv) >= 9)
vgpu              923 drivers/gpu/drm/i915/gvt/handlers.c 	} else if (IS_BROADWELL(vgpu->gvt->dev_priv) &&
vgpu              931 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) = 0;
vgpu              939 drivers/gpu/drm/i915/gvt/handlers.c 	msg = vgpu_vreg(vgpu, offset + 4);
vgpu              959 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 4) = AUX_NATIVE_REPLY_NAK;
vgpu              960 drivers/gpu/drm/i915/gvt/handlers.c 			dp_aux_ch_ctl_trans_done(vgpu, data, offset, 2, true);
vgpu              976 drivers/gpu/drm/i915/gvt/handlers.c 			u32 r = vgpu_vreg(vgpu, offset + 8 + t * 4);
vgpu              998 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset + 4) = 0;
vgpu              999 drivers/gpu/drm/i915/gvt/handlers.c 		dp_aux_ch_ctl_trans_done(vgpu, data, offset, 1,
vgpu             1017 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 4) = 0;
vgpu             1018 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 8) = 0;
vgpu             1019 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 12) = 0;
vgpu             1020 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 16) = 0;
vgpu             1021 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 20) = 0;
vgpu             1023 drivers/gpu/drm/i915/gvt/handlers.c 			dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2,
vgpu             1030 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset + 4 * idx) = 0;
vgpu             1052 drivers/gpu/drm/i915/gvt/handlers.c 					vgpu_vreg(vgpu, offset +
vgpu             1058 drivers/gpu/drm/i915/gvt/handlers.c 		dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2,
vgpu             1064 drivers/gpu/drm/i915/gvt/handlers.c 	intel_gvt_i2c_handle_aux_ch_write(vgpu, port_index, offset, p_data);
vgpu             1067 drivers/gpu/drm/i915/gvt/handlers.c 		trigger_aux_channel_interrupt(vgpu, offset);
vgpu             1071 drivers/gpu/drm/i915/gvt/handlers.c static int mbctl_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1075 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1079 drivers/gpu/drm/i915/gvt/handlers.c static int vga_control_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1084 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1085 drivers/gpu/drm/i915/gvt/handlers.c 	vga_disable = vgpu_vreg(vgpu, offset) & VGA_DISP_DISABLE;
vgpu             1087 drivers/gpu/drm/i915/gvt/handlers.c 	gvt_dbg_core("vgpu%d: %s VGA mode\n", vgpu->id,
vgpu             1092 drivers/gpu/drm/i915/gvt/handlers.c static u32 read_virtual_sbi_register(struct intel_vgpu *vgpu,
vgpu             1095 drivers/gpu/drm/i915/gvt/handlers.c 	struct intel_vgpu_display *display = &vgpu->display;
vgpu             1109 drivers/gpu/drm/i915/gvt/handlers.c static void write_virtual_sbi_register(struct intel_vgpu *vgpu,
vgpu             1112 drivers/gpu/drm/i915/gvt/handlers.c 	struct intel_vgpu_display *display = &vgpu->display;
vgpu             1133 drivers/gpu/drm/i915/gvt/handlers.c static int sbi_data_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1136 drivers/gpu/drm/i915/gvt/handlers.c 	if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >>
vgpu             1138 drivers/gpu/drm/i915/gvt/handlers.c 		unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) &
vgpu             1140 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) = read_virtual_sbi_register(vgpu,
vgpu             1143 drivers/gpu/drm/i915/gvt/handlers.c 	read_vreg(vgpu, offset, p_data, bytes);
vgpu             1147 drivers/gpu/drm/i915/gvt/handlers.c static int sbi_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1152 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1153 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu             1161 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = data;
vgpu             1163 drivers/gpu/drm/i915/gvt/handlers.c 	if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >>
vgpu             1165 drivers/gpu/drm/i915/gvt/handlers.c 		unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) &
vgpu             1168 drivers/gpu/drm/i915/gvt/handlers.c 		write_virtual_sbi_register(vgpu, sbi_offset,
vgpu             1169 drivers/gpu/drm/i915/gvt/handlers.c 					   vgpu_vreg_t(vgpu, SBI_DATA));
vgpu             1177 drivers/gpu/drm/i915/gvt/handlers.c static int pvinfo_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1182 drivers/gpu/drm/i915/gvt/handlers.c 	read_vreg(vgpu, offset, p_data, bytes);
vgpu             1205 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu->pv_notified = true;
vgpu             1209 drivers/gpu/drm/i915/gvt/handlers.c static int handle_g2v_notification(struct intel_vgpu *vgpu, int notification)
vgpu             1215 drivers/gpu/drm/i915/gvt/handlers.c 	pdps = (u64 *)&vgpu_vreg64_t(vgpu, vgtif_reg(pdp[0]));
vgpu             1222 drivers/gpu/drm/i915/gvt/handlers.c 		mm = intel_vgpu_get_ppgtt_mm(vgpu, root_entry_type, pdps);
vgpu             1226 drivers/gpu/drm/i915/gvt/handlers.c 		return intel_vgpu_put_ppgtt_mm(vgpu, pdps);
vgpu             1237 drivers/gpu/drm/i915/gvt/handlers.c static int send_display_ready_uevent(struct intel_vgpu *vgpu, int ready)
vgpu             1239 drivers/gpu/drm/i915/gvt/handlers.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu             1248 drivers/gpu/drm/i915/gvt/handlers.c 	snprintf(vmid_str, 20, "VMID=%d", vgpu->id);
vgpu             1254 drivers/gpu/drm/i915/gvt/handlers.c static int pvinfo_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1262 drivers/gpu/drm/i915/gvt/handlers.c 		send_display_ready_uevent(vgpu, data ? 1 : 0);
vgpu             1265 drivers/gpu/drm/i915/gvt/handlers.c 		handle_g2v_notification(vgpu, data);
vgpu             1283 drivers/gpu/drm/i915/gvt/handlers.c 		enter_failsafe_mode(vgpu, GVT_FAILSAFE_INSUFFICIENT_RESOURCE);
vgpu             1293 drivers/gpu/drm/i915/gvt/handlers.c 		write_vreg(vgpu, offset, p_data, bytes);
vgpu             1298 drivers/gpu/drm/i915/gvt/handlers.c static int pf_write(struct intel_vgpu *vgpu,
vgpu             1307 drivers/gpu/drm/i915/gvt/handlers.c 			  vgpu->id);
vgpu             1311 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes);
vgpu             1314 drivers/gpu/drm/i915/gvt/handlers.c static int power_well_ctl_mmio_write(struct intel_vgpu *vgpu,
vgpu             1317 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1319 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) &
vgpu             1321 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |=
vgpu             1324 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &=
vgpu             1329 drivers/gpu/drm/i915/gvt/handlers.c static int gen9_dbuf_ctl_mmio_write(struct intel_vgpu *vgpu,
vgpu             1332 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1334 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & DBUF_POWER_REQUEST)
vgpu             1335 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) |= DBUF_POWER_STATE;
vgpu             1337 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~DBUF_POWER_STATE;
vgpu             1342 drivers/gpu/drm/i915/gvt/handlers.c static int fpga_dbg_mmio_write(struct intel_vgpu *vgpu,
vgpu             1345 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1347 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, offset) & FPGA_DBG_RM_NOCLAIM)
vgpu             1348 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) &= ~FPGA_DBG_RM_NOCLAIM;
vgpu             1352 drivers/gpu/drm/i915/gvt/handlers.c static int dma_ctrl_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1357 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1358 drivers/gpu/drm/i915/gvt/handlers.c 	mode = vgpu_vreg(vgpu, offset);
vgpu             1362 drivers/gpu/drm/i915/gvt/handlers.c 				vgpu->id);
vgpu             1369 drivers/gpu/drm/i915/gvt/handlers.c static int gen9_trtte_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1376 drivers/gpu/drm/i915/gvt/handlers.c 				vgpu->id);
vgpu             1379 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1384 drivers/gpu/drm/i915/gvt/handlers.c static int gen9_trtt_chicken_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1387 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1391 drivers/gpu/drm/i915/gvt/handlers.c static int dpll_status_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1396 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, 0x46010) & (1 << 31))
vgpu             1399 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, 0x46014) & (1 << 31))
vgpu             1402 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, 0x46040) & (1 << 31))
vgpu             1405 drivers/gpu/drm/i915/gvt/handlers.c 	if (vgpu_vreg(vgpu, 0x46060) & (1 << 31))
vgpu             1408 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1410 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes);
vgpu             1413 drivers/gpu/drm/i915/gvt/handlers.c static int mailbox_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1418 drivers/gpu/drm/i915/gvt/handlers.c 	u32 *data0 = &vgpu_vreg_t(vgpu, GEN6_PCODE_DATA);
vgpu             1422 drivers/gpu/drm/i915/gvt/handlers.c 		if (IS_SKYLAKE(vgpu->gvt->dev_priv)
vgpu             1423 drivers/gpu/drm/i915/gvt/handlers.c 			 || IS_KABYLAKE(vgpu->gvt->dev_priv)
vgpu             1424 drivers/gpu/drm/i915/gvt/handlers.c 			 || IS_COFFEELAKE(vgpu->gvt->dev_priv)) {
vgpu             1434 drivers/gpu/drm/i915/gvt/handlers.c 		} else if (IS_BROXTON(vgpu->gvt->dev_priv)) {
vgpu             1447 drivers/gpu/drm/i915/gvt/handlers.c 		if (IS_SKYLAKE(vgpu->gvt->dev_priv)
vgpu             1448 drivers/gpu/drm/i915/gvt/handlers.c 			 || IS_KABYLAKE(vgpu->gvt->dev_priv)
vgpu             1449 drivers/gpu/drm/i915/gvt/handlers.c 			 || IS_COFFEELAKE(vgpu->gvt->dev_priv))
vgpu             1458 drivers/gpu/drm/i915/gvt/handlers.c 		     vgpu->id, value, *data0);
vgpu             1466 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes);
vgpu             1469 drivers/gpu/drm/i915/gvt/handlers.c static int hws_pga_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1473 drivers/gpu/drm/i915/gvt/handlers.c 	int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
vgpu             1475 drivers/gpu/drm/i915/gvt/handlers.c 	if (!intel_gvt_ggtt_validate_range(vgpu, value, I915_GTT_PAGE_SIZE)) {
vgpu             1490 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu->hws_pga[ring_id] = value;
vgpu             1492 drivers/gpu/drm/i915/gvt/handlers.c 		     vgpu->id, value, offset);
vgpu             1494 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes);
vgpu             1497 drivers/gpu/drm/i915/gvt/handlers.c static int skl_power_well_ctl_write(struct intel_vgpu *vgpu,
vgpu             1502 drivers/gpu/drm/i915/gvt/handlers.c 	if (IS_BROXTON(vgpu->gvt->dev_priv))
vgpu             1509 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_write(vgpu, offset, &v, bytes);
vgpu             1512 drivers/gpu/drm/i915/gvt/handlers.c static int skl_lcpll_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1521 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1526 drivers/gpu/drm/i915/gvt/handlers.c static int bxt_de_pll_enable_write(struct intel_vgpu *vgpu,
vgpu             1534 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1539 drivers/gpu/drm/i915/gvt/handlers.c static int bxt_port_pll_enable_write(struct intel_vgpu *vgpu,
vgpu             1547 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1552 drivers/gpu/drm/i915/gvt/handlers.c static int bxt_phy_ctl_family_write(struct intel_vgpu *vgpu,
vgpu             1560 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_A) = data;
vgpu             1563 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_B) = data;
vgpu             1564 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_C) = data;
vgpu             1568 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1573 drivers/gpu/drm/i915/gvt/handlers.c static int bxt_port_tx_dw3_read(struct intel_vgpu *vgpu,
vgpu             1576 drivers/gpu/drm/i915/gvt/handlers.c 	u32 v = vgpu_vreg(vgpu, offset);
vgpu             1580 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1582 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes);
vgpu             1585 drivers/gpu/drm/i915/gvt/handlers.c static int bxt_pcs_dw12_grp_write(struct intel_vgpu *vgpu,
vgpu             1591 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset - 0x600) = v;
vgpu             1592 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset - 0x800) = v;
vgpu             1594 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset - 0x400) = v;
vgpu             1595 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset - 0x600) = v;
vgpu             1598 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1603 drivers/gpu/drm/i915/gvt/handlers.c static int bxt_gt_disp_pwron_write(struct intel_vgpu *vgpu,
vgpu             1609 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) &=
vgpu             1611 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) |=
vgpu             1616 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) &=
vgpu             1618 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) |=
vgpu             1623 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = v;
vgpu             1628 drivers/gpu/drm/i915/gvt/handlers.c static int edp_psr_imr_iir_write(struct intel_vgpu *vgpu,
vgpu             1631 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = 0;
vgpu             1635 drivers/gpu/drm/i915/gvt/handlers.c static int mmio_read_from_hw(struct intel_vgpu *vgpu,
vgpu             1638 drivers/gpu/drm/i915/gvt/handlers.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1653 drivers/gpu/drm/i915/gvt/handlers.c 	if (ring_id < 0 || vgpu == gvt->scheduler.engine_owner[ring_id] ||
vgpu             1657 drivers/gpu/drm/i915/gvt/handlers.c 		vgpu_vreg(vgpu, offset) = I915_READ(_MMIO(offset));
vgpu             1661 drivers/gpu/drm/i915/gvt/handlers.c 	return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes);
vgpu             1664 drivers/gpu/drm/i915/gvt/handlers.c static int elsp_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1667 drivers/gpu/drm/i915/gvt/handlers.c 	int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
vgpu             1675 drivers/gpu/drm/i915/gvt/handlers.c 	execlist = &vgpu->submission.execlist[ring_id];
vgpu             1679 drivers/gpu/drm/i915/gvt/handlers.c 		ret = intel_vgpu_submit_execlist(vgpu, ring_id);
vgpu             1690 drivers/gpu/drm/i915/gvt/handlers.c static int ring_mode_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             1694 drivers/gpu/drm/i915/gvt/handlers.c 	int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
vgpu             1699 drivers/gpu/drm/i915/gvt/handlers.c 	if (IS_COFFEELAKE(vgpu->gvt->dev_priv))
vgpu             1701 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1704 drivers/gpu/drm/i915/gvt/handlers.c 		enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST);
vgpu             1708 drivers/gpu/drm/i915/gvt/handlers.c 	if (IS_COFFEELAKE(vgpu->gvt->dev_priv) &&
vgpu             1710 drivers/gpu/drm/i915/gvt/handlers.c 		enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST);
vgpu             1720 drivers/gpu/drm/i915/gvt/handlers.c 			&& !vgpu->pv_notified) {
vgpu             1721 drivers/gpu/drm/i915/gvt/handlers.c 		enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST);
vgpu             1735 drivers/gpu/drm/i915/gvt/handlers.c 		ret = intel_vgpu_select_submission_ops(vgpu,
vgpu             1741 drivers/gpu/drm/i915/gvt/handlers.c 		intel_vgpu_start_schedule(vgpu);
vgpu             1746 drivers/gpu/drm/i915/gvt/handlers.c static int gvt_reg_tlb_control_handler(struct intel_vgpu *vgpu,
vgpu             1751 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1752 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = 0;
vgpu             1773 drivers/gpu/drm/i915/gvt/handlers.c 	set_bit(id, (void *)vgpu->submission.tlb_handle_pending);
vgpu             1778 drivers/gpu/drm/i915/gvt/handlers.c static int ring_reset_ctl_write(struct intel_vgpu *vgpu,
vgpu             1783 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1784 drivers/gpu/drm/i915/gvt/handlers.c 	data = vgpu_vreg(vgpu, offset);
vgpu             1791 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = data;
vgpu             1795 drivers/gpu/drm/i915/gvt/handlers.c static int csfe_chicken1_mmio_write(struct intel_vgpu *vgpu,
vgpu             1802 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             1805 drivers/gpu/drm/i915/gvt/handlers.c 		enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST);
vgpu             3441 drivers/gpu/drm/i915/gvt/handlers.c int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             3444 drivers/gpu/drm/i915/gvt/handlers.c 	read_vreg(vgpu, offset, p_data, bytes);
vgpu             3458 drivers/gpu/drm/i915/gvt/handlers.c int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             3461 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             3475 drivers/gpu/drm/i915/gvt/handlers.c int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             3480 drivers/gpu/drm/i915/gvt/handlers.c 	old_vreg = vgpu_vreg(vgpu, offset);
vgpu             3481 drivers/gpu/drm/i915/gvt/handlers.c 	write_vreg(vgpu, offset, p_data, bytes);
vgpu             3482 drivers/gpu/drm/i915/gvt/handlers.c 	mask = vgpu_vreg(vgpu, offset) >> 16;
vgpu             3483 drivers/gpu/drm/i915/gvt/handlers.c 	vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) |
vgpu             3484 drivers/gpu/drm/i915/gvt/handlers.c 				(vgpu_vreg(vgpu, offset) & mask);
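
intel_vgpu_mask_mmio_write() above implements the i915 masked-register convention: the upper 16 bits of the written value say which of the lower 16 bits may change, and everything else keeps its previous contents. A self-contained sketch of that merge:

    /* masked-register merge, same rule as the three lines above */
    #include <stdint.h>
    #include <assert.h>

    static uint32_t masked_write(uint32_t old, uint32_t written)
    {
        uint32_t mask = written >> 16;   /* which low bits the guest wants to touch */

        return (old & ~mask) | (written & mask);
    }

    int main(void)
    {
        /* set bit 0, leave every other low bit alone */
        uint32_t v = masked_write(0x00000006, 0x00010001);

        assert(v == 0x00000007);
        return 0;
    }
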
vgpu             3517 drivers/gpu/drm/i915/gvt/handlers.c int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset,
vgpu             3520 drivers/gpu/drm/i915/gvt/handlers.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             3536 drivers/gpu/drm/i915/gvt/handlers.c 			return func(vgpu, offset, pdata, bytes);
vgpu             3550 drivers/gpu/drm/i915/gvt/handlers.c 		return mmio_info->read(vgpu, offset, pdata, bytes);
vgpu             3557 drivers/gpu/drm/i915/gvt/handlers.c 			old_vreg = vgpu_vreg(vgpu, offset);
vgpu             3561 drivers/gpu/drm/i915/gvt/handlers.c 			ret = mmio_info->write(vgpu, offset, pdata, bytes);
vgpu             3569 drivers/gpu/drm/i915/gvt/handlers.c 			data |= vgpu_vreg(vgpu, offset) & ro_mask;
vgpu             3570 drivers/gpu/drm/i915/gvt/handlers.c 			ret = mmio_info->write(vgpu, offset, &data, bytes);
vgpu             3575 drivers/gpu/drm/i915/gvt/handlers.c 			u32 mask = vgpu_vreg(vgpu, offset) >> 16;
vgpu             3577 drivers/gpu/drm/i915/gvt/handlers.c 			vgpu_vreg(vgpu, offset) = (old_vreg & ~mask)
vgpu             3578 drivers/gpu/drm/i915/gvt/handlers.c 					| (vgpu_vreg(vgpu, offset) & mask);
vgpu             3586 drivers/gpu/drm/i915/gvt/handlers.c 		intel_vgpu_default_mmio_read(vgpu, offset, pdata, bytes) :
vgpu             3587 drivers/gpu/drm/i915/gvt/handlers.c 		intel_vgpu_default_mmio_write(vgpu, offset, pdata, bytes);
vgpu               49 drivers/gpu/drm/i915/gvt/hypercall.h 	int (*attach_vgpu)(void *vgpu, unsigned long *handle);
vgpu               50 drivers/gpu/drm/i915/gvt/hypercall.h 	void (*detach_vgpu)(void *vgpu);
vgpu               69 drivers/gpu/drm/i915/gvt/hypercall.h 	int (*set_opregion)(void *vgpu);
vgpu               70 drivers/gpu/drm/i915/gvt/hypercall.h 	int (*set_edid)(void *vgpu, int port_num);
vgpu               71 drivers/gpu/drm/i915/gvt/hypercall.h 	int (*get_vfio_device)(void *vgpu);
vgpu               72 drivers/gpu/drm/i915/gvt/hypercall.h 	void (*put_vfio_device)(void *vgpu);
vgpu               51 drivers/gpu/drm/i915/gvt/interrupt.c static void update_upstream_irq(struct intel_vgpu *vgpu,
vgpu              175 drivers/gpu/drm/i915/gvt/interrupt.c int intel_vgpu_reg_imr_handler(struct intel_vgpu *vgpu,
vgpu              178 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              182 drivers/gpu/drm/i915/gvt/interrupt.c 	trace_write_ir(vgpu->id, "IMR", reg, imr, vgpu_vreg(vgpu, reg),
vgpu              183 drivers/gpu/drm/i915/gvt/interrupt.c 		       (vgpu_vreg(vgpu, reg) ^ imr));
vgpu              185 drivers/gpu/drm/i915/gvt/interrupt.c 	vgpu_vreg(vgpu, reg) = imr;
vgpu              187 drivers/gpu/drm/i915/gvt/interrupt.c 	ops->check_pending_irq(vgpu);
vgpu              205 drivers/gpu/drm/i915/gvt/interrupt.c int intel_vgpu_reg_master_irq_handler(struct intel_vgpu *vgpu,
vgpu              208 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              211 drivers/gpu/drm/i915/gvt/interrupt.c 	u32 virtual_ier = vgpu_vreg(vgpu, reg);
vgpu              213 drivers/gpu/drm/i915/gvt/interrupt.c 	trace_write_ir(vgpu->id, "MASTER_IRQ", reg, ier, virtual_ier,
vgpu              223 drivers/gpu/drm/i915/gvt/interrupt.c 	vgpu_vreg(vgpu, reg) &= ~GEN8_MASTER_IRQ_CONTROL;
vgpu              224 drivers/gpu/drm/i915/gvt/interrupt.c 	vgpu_vreg(vgpu, reg) |= ier;
vgpu              226 drivers/gpu/drm/i915/gvt/interrupt.c 	ops->check_pending_irq(vgpu);
vgpu              244 drivers/gpu/drm/i915/gvt/interrupt.c int intel_vgpu_reg_ier_handler(struct intel_vgpu *vgpu,
vgpu              247 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              252 drivers/gpu/drm/i915/gvt/interrupt.c 	trace_write_ir(vgpu->id, "IER", reg, ier, vgpu_vreg(vgpu, reg),
vgpu              253 drivers/gpu/drm/i915/gvt/interrupt.c 		       (vgpu_vreg(vgpu, reg) ^ ier));
vgpu              255 drivers/gpu/drm/i915/gvt/interrupt.c 	vgpu_vreg(vgpu, reg) = ier;
vgpu              262 drivers/gpu/drm/i915/gvt/interrupt.c 		update_upstream_irq(vgpu, info);
vgpu              264 drivers/gpu/drm/i915/gvt/interrupt.c 	ops->check_pending_irq(vgpu);
vgpu              282 drivers/gpu/drm/i915/gvt/interrupt.c int intel_vgpu_reg_iir_handler(struct intel_vgpu *vgpu, unsigned int reg,
vgpu              285 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt_irq_info *info = regbase_to_irq_info(vgpu->gvt,
vgpu              289 drivers/gpu/drm/i915/gvt/interrupt.c 	trace_write_ir(vgpu->id, "IIR", reg, iir, vgpu_vreg(vgpu, reg),
vgpu              290 drivers/gpu/drm/i915/gvt/interrupt.c 		       (vgpu_vreg(vgpu, reg) ^ iir));
vgpu              295 drivers/gpu/drm/i915/gvt/interrupt.c 	vgpu_vreg(vgpu, reg) &= ~iir;
vgpu              298 drivers/gpu/drm/i915/gvt/interrupt.c 		update_upstream_irq(vgpu, info);
vgpu              319 drivers/gpu/drm/i915/gvt/interrupt.c static void update_upstream_irq(struct intel_vgpu *vgpu,
vgpu              322 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt_irq *irq = &vgpu->gvt->irq;
vgpu              328 drivers/gpu/drm/i915/gvt/interrupt.c 	u32 val = vgpu_vreg(vgpu,
vgpu              330 drivers/gpu/drm/i915/gvt/interrupt.c 		& vgpu_vreg(vgpu,
vgpu              359 drivers/gpu/drm/i915/gvt/interrupt.c 		vgpu_vreg(vgpu, isr) &= ~clear_bits;
vgpu              360 drivers/gpu/drm/i915/gvt/interrupt.c 		vgpu_vreg(vgpu, isr) |= set_bits;
vgpu              367 drivers/gpu/drm/i915/gvt/interrupt.c 		vgpu_vreg(vgpu, iir) |= (set_bits & ~vgpu_vreg(vgpu, imr));
vgpu              371 drivers/gpu/drm/i915/gvt/interrupt.c 		update_upstream_irq(vgpu, up_irq_info);
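
update_upstream_irq() above walks the virtual interrupt hierarchy: a child group is pending when its IIR & IER is non-zero, the parent's ISR is adjusted, the parent's IIR bit is set unless the parent's IMR masks it, and the walk then repeats one level up. A simplified, recursive userspace sketch of that propagation (the structure and bit numbers are invented, and the ISR bookkeeping is omitted):

    /* upstream IRQ propagation sketch; two-level hierarchy only */
    #include <stdint.h>
    #include <stdio.h>

    struct irq_group {
        uint32_t iir, ier, imr;
        struct irq_group *parent;
        uint32_t bit_in_parent;      /* which parent IIR bit this group drives */
    };

    static void update_upstream(struct irq_group *g)
    {
        struct irq_group *p = g->parent;
        int pending = (g->iir & g->ier) != 0;

        if (!p)
            return;
        if (pending)
            p->iir |= g->bit_in_parent & ~p->imr;   /* set unless masked */
        else
            p->iir &= ~g->bit_in_parent;
        update_upstream(p);                         /* keep walking up */
    }

    int main(void)
    {
        struct irq_group master = { .imr = 0 };
        struct irq_group pipe = { .parent = &master, .bit_in_parent = 1u << 3 };

        pipe.ier = 1u << 0;
        pipe.iir |= 1u << 0;          /* event fired and is enabled */
        update_upstream(&pipe);
        printf("master IIR = %#x\n", (unsigned)master.iir);   /* prints 0x8 */
        return 0;
    }
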
vgpu              395 drivers/gpu/drm/i915/gvt/interrupt.c static int inject_virtual_interrupt(struct intel_vgpu *vgpu)
vgpu              397 drivers/gpu/drm/i915/gvt/interrupt.c 	return intel_gvt_hypervisor_inject_msi(vgpu);
vgpu              401 drivers/gpu/drm/i915/gvt/interrupt.c 	enum intel_gvt_event_type event, struct intel_vgpu *vgpu)
vgpu              414 drivers/gpu/drm/i915/gvt/interrupt.c 	if (!test_bit(bit, (void *)&vgpu_vreg(vgpu,
vgpu              416 drivers/gpu/drm/i915/gvt/interrupt.c 		trace_propagate_event(vgpu->id, irq_name[event], bit);
vgpu              417 drivers/gpu/drm/i915/gvt/interrupt.c 		set_bit(bit, (void *)&vgpu_vreg(vgpu,
vgpu              424 drivers/gpu/drm/i915/gvt/interrupt.c 	enum intel_gvt_event_type event, struct intel_vgpu *vgpu)
vgpu              426 drivers/gpu/drm/i915/gvt/interrupt.c 	if (!vgpu->irq.irq_warn_once[event]) {
vgpu              428 drivers/gpu/drm/i915/gvt/interrupt.c 			vgpu->id, event, irq_name[event]);
vgpu              429 drivers/gpu/drm/i915/gvt/interrupt.c 		vgpu->irq.irq_warn_once[event] = true;
vgpu              431 drivers/gpu/drm/i915/gvt/interrupt.c 	propagate_event(irq, event, vgpu);
vgpu              464 drivers/gpu/drm/i915/gvt/interrupt.c static void gen8_check_pending_irq(struct intel_vgpu *vgpu)
vgpu              466 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt_irq *irq = &vgpu->gvt->irq;
vgpu              469 drivers/gpu/drm/i915/gvt/interrupt.c 	if (!(vgpu_vreg(vgpu, i915_mmio_reg_offset(GEN8_MASTER_IRQ)) &
vgpu              481 drivers/gpu/drm/i915/gvt/interrupt.c 		if ((vgpu_vreg(vgpu, regbase_to_iir(reg_base))
vgpu              482 drivers/gpu/drm/i915/gvt/interrupt.c 				& vgpu_vreg(vgpu, regbase_to_ier(reg_base))))
vgpu              483 drivers/gpu/drm/i915/gvt/interrupt.c 			update_upstream_irq(vgpu, info);
vgpu              486 drivers/gpu/drm/i915/gvt/interrupt.c 	if (vgpu_vreg(vgpu, i915_mmio_reg_offset(GEN8_MASTER_IRQ))
vgpu              488 drivers/gpu/drm/i915/gvt/interrupt.c 		inject_virtual_interrupt(vgpu);
vgpu              618 drivers/gpu/drm/i915/gvt/interrupt.c void intel_vgpu_trigger_virtual_event(struct intel_vgpu *vgpu,
vgpu              621 drivers/gpu/drm/i915/gvt/interrupt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              629 drivers/gpu/drm/i915/gvt/interrupt.c 	handler(irq, event, vgpu);
vgpu              631 drivers/gpu/drm/i915/gvt/interrupt.c 	ops->check_pending_irq(vgpu);
vgpu              140 drivers/gpu/drm/i915/gvt/interrupt.h 	enum intel_gvt_event_type event, struct intel_vgpu *vgpu);
vgpu              144 drivers/gpu/drm/i915/gvt/interrupt.h 	void (*check_pending_irq)(struct intel_vgpu *vgpu);
vgpu              217 drivers/gpu/drm/i915/gvt/interrupt.h void intel_vgpu_trigger_virtual_event(struct intel_vgpu *vgpu,
vgpu              220 drivers/gpu/drm/i915/gvt/interrupt.h int intel_vgpu_reg_iir_handler(struct intel_vgpu *vgpu, unsigned int reg,
vgpu              222 drivers/gpu/drm/i915/gvt/interrupt.h int intel_vgpu_reg_ier_handler(struct intel_vgpu *vgpu,
vgpu              224 drivers/gpu/drm/i915/gvt/interrupt.h int intel_vgpu_reg_master_irq_handler(struct intel_vgpu *vgpu,
vgpu              226 drivers/gpu/drm/i915/gvt/interrupt.h int intel_vgpu_reg_imr_handler(struct intel_vgpu *vgpu,
vgpu               66 drivers/gpu/drm/i915/gvt/kvmgt.c 	size_t (*rw)(struct intel_vgpu *vgpu, char *buf,
vgpu               68 drivers/gpu/drm/i915/gvt/kvmgt.c 	void (*release)(struct intel_vgpu *vgpu,
vgpu               93 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu;
vgpu              102 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu;
vgpu              120 drivers/gpu/drm/i915/gvt/kvmgt.c static void gvt_unpin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
vgpu              132 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = vfio_unpin_pages(mdev_dev(vgpu->vdev.mdev), &cur_gfn, 1);
vgpu              138 drivers/gpu/drm/i915/gvt/kvmgt.c static int gvt_pin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
vgpu              155 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = vfio_pin_pages(mdev_dev(vgpu->vdev.mdev), &cur_gfn, 1,
vgpu              183 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_unpin_guest_page(vgpu, gfn, npage * PAGE_SIZE);
vgpu              187 drivers/gpu/drm/i915/gvt/kvmgt.c static int gvt_dma_map_page(struct intel_vgpu *vgpu, unsigned long gfn,
vgpu              190 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct device *dev = &vgpu->gvt->dev_priv->drm.pdev->dev;
vgpu              194 drivers/gpu/drm/i915/gvt/kvmgt.c 	ret = gvt_pin_guest_page(vgpu, gfn, size, &page);
vgpu              203 drivers/gpu/drm/i915/gvt/kvmgt.c 		gvt_unpin_guest_page(vgpu, gfn, size);
vgpu              210 drivers/gpu/drm/i915/gvt/kvmgt.c static void gvt_dma_unmap_page(struct intel_vgpu *vgpu, unsigned long gfn,
vgpu              213 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct device *dev = &vgpu->gvt->dev_priv->drm.pdev->dev;
vgpu              216 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_unpin_guest_page(vgpu, gfn, size);
vgpu              219 drivers/gpu/drm/i915/gvt/kvmgt.c static struct gvt_dma *__gvt_cache_find_dma_addr(struct intel_vgpu *vgpu,
vgpu              222 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct rb_node *node = vgpu->vdev.dma_addr_cache.rb_node;
vgpu              238 drivers/gpu/drm/i915/gvt/kvmgt.c static struct gvt_dma *__gvt_cache_find_gfn(struct intel_vgpu *vgpu, gfn_t gfn)
vgpu              240 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct rb_node *node = vgpu->vdev.gfn_cache.rb_node;
vgpu              256 drivers/gpu/drm/i915/gvt/kvmgt.c static int __gvt_cache_add(struct intel_vgpu *vgpu, gfn_t gfn,
vgpu              266 drivers/gpu/drm/i915/gvt/kvmgt.c 	new->vgpu = vgpu;
vgpu              273 drivers/gpu/drm/i915/gvt/kvmgt.c 	link = &vgpu->vdev.gfn_cache.rb_node;
vgpu              284 drivers/gpu/drm/i915/gvt/kvmgt.c 	rb_insert_color(&new->gfn_node, &vgpu->vdev.gfn_cache);
vgpu              288 drivers/gpu/drm/i915/gvt/kvmgt.c 	link = &vgpu->vdev.dma_addr_cache.rb_node;
vgpu              299 drivers/gpu/drm/i915/gvt/kvmgt.c 	rb_insert_color(&new->dma_addr_node, &vgpu->vdev.dma_addr_cache);
vgpu              301 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.nr_cache_entries++;
vgpu              305 drivers/gpu/drm/i915/gvt/kvmgt.c static void __gvt_cache_remove_entry(struct intel_vgpu *vgpu,
vgpu              308 drivers/gpu/drm/i915/gvt/kvmgt.c 	rb_erase(&entry->gfn_node, &vgpu->vdev.gfn_cache);
vgpu              309 drivers/gpu/drm/i915/gvt/kvmgt.c 	rb_erase(&entry->dma_addr_node, &vgpu->vdev.dma_addr_cache);
vgpu              311 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.nr_cache_entries--;
vgpu              314 drivers/gpu/drm/i915/gvt/kvmgt.c static void gvt_cache_destroy(struct intel_vgpu *vgpu)
vgpu              320 drivers/gpu/drm/i915/gvt/kvmgt.c 		mutex_lock(&vgpu->vdev.cache_lock);
vgpu              321 drivers/gpu/drm/i915/gvt/kvmgt.c 		node = rb_first(&vgpu->vdev.gfn_cache);
vgpu              323 drivers/gpu/drm/i915/gvt/kvmgt.c 			mutex_unlock(&vgpu->vdev.cache_lock);
vgpu              327 drivers/gpu/drm/i915/gvt/kvmgt.c 		gvt_dma_unmap_page(vgpu, dma->gfn, dma->dma_addr, dma->size);
vgpu              328 drivers/gpu/drm/i915/gvt/kvmgt.c 		__gvt_cache_remove_entry(vgpu, dma);
vgpu              329 drivers/gpu/drm/i915/gvt/kvmgt.c 		mutex_unlock(&vgpu->vdev.cache_lock);
vgpu              333 drivers/gpu/drm/i915/gvt/kvmgt.c static void gvt_cache_init(struct intel_vgpu *vgpu)
vgpu              335 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.gfn_cache = RB_ROOT;
vgpu              336 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.dma_addr_cache = RB_ROOT;
vgpu              337 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.nr_cache_entries = 0;
vgpu              338 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_init(&vgpu->vdev.cache_lock);
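
The kvmgt cache code above keeps one gvt_dma entry per pinned guest page and indexes it twice, once by guest frame number (gfn_cache) and once by DMA address (dma_addr_cache), so both the map and unmap paths can find it cheaply; removing an entry must erase it from both indexes. The kernel uses two rb-trees; the sketch below uses a flat array purely to keep the illustration short.

    /* dual-index DMA cache sketch; array scan replaces the two rb-trees */
    #include <stddef.h>
    #include <stdint.h>
    #include <assert.h>

    struct dma_entry {
        uint64_t gfn, dma_addr;
        int used;
    };

    static struct dma_entry cache[16];

    static struct dma_entry *find_by_gfn(uint64_t gfn)
    {
        for (size_t i = 0; i < 16; i++)
            if (cache[i].used && cache[i].gfn == gfn)
                return &cache[i];
        return NULL;
    }

    static struct dma_entry *find_by_dma(uint64_t dma)
    {
        for (size_t i = 0; i < 16; i++)
            if (cache[i].used && cache[i].dma_addr == dma)
                return &cache[i];
        return NULL;
    }

    int main(void)
    {
        cache[0] = (struct dma_entry){ .gfn = 0x1000, .dma_addr = 0xabc000, .used = 1 };
        /* one entry, reachable by either key */
        assert(find_by_gfn(0x1000) == find_by_dma(0xabc000));
        return 0;
    }
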
vgpu              409 drivers/gpu/drm/i915/gvt/kvmgt.c static size_t intel_vgpu_reg_rw_opregion(struct intel_vgpu *vgpu, char *buf,
vgpu              414 drivers/gpu/drm/i915/gvt/kvmgt.c 	void *base = vgpu->vdev.region[i].data;
vgpu              417 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (pos >= vgpu->vdev.region[i].size || iswrite) {
vgpu              421 drivers/gpu/drm/i915/gvt/kvmgt.c 	count = min(count, (size_t)(vgpu->vdev.region[i].size - pos));
vgpu              427 drivers/gpu/drm/i915/gvt/kvmgt.c static void intel_vgpu_reg_release_opregion(struct intel_vgpu *vgpu,
vgpu              437 drivers/gpu/drm/i915/gvt/kvmgt.c static int handle_edid_regs(struct intel_vgpu *vgpu,
vgpu              463 drivers/gpu/drm/i915/gvt/kvmgt.c 				intel_gvt_ops->emulate_hotplug(vgpu, true);
vgpu              465 drivers/gpu/drm/i915/gvt/kvmgt.c 				intel_gvt_ops->emulate_hotplug(vgpu, false);
vgpu              508 drivers/gpu/drm/i915/gvt/kvmgt.c static size_t intel_vgpu_reg_rw_edid(struct intel_vgpu *vgpu, char *buf,
vgpu              515 drivers/gpu/drm/i915/gvt/kvmgt.c 		(struct vfio_edid_region *)vgpu->vdev.region[i].data;
vgpu              519 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = handle_edid_regs(vgpu, region, buf, count, pos, iswrite);
vgpu              531 drivers/gpu/drm/i915/gvt/kvmgt.c static void intel_vgpu_reg_release_edid(struct intel_vgpu *vgpu,
vgpu              542 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_register_reg(struct intel_vgpu *vgpu,
vgpu              549 drivers/gpu/drm/i915/gvt/kvmgt.c 	region = krealloc(vgpu->vdev.region,
vgpu              550 drivers/gpu/drm/i915/gvt/kvmgt.c 			(vgpu->vdev.num_regions + 1) * sizeof(*region),
vgpu              555 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region = region;
vgpu              556 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region[vgpu->vdev.num_regions].type = type;
vgpu              557 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region[vgpu->vdev.num_regions].subtype = subtype;
vgpu              558 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region[vgpu->vdev.num_regions].ops = ops;
vgpu              559 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region[vgpu->vdev.num_regions].size = size;
vgpu              560 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region[vgpu->vdev.num_regions].flags = flags;
vgpu              561 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region[vgpu->vdev.num_regions].data = data;
vgpu              562 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.num_regions++;
vgpu              568 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu;
vgpu              570 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.vfio_device = vfio_device_get_from_dev(
vgpu              571 drivers/gpu/drm/i915/gvt/kvmgt.c 		mdev_dev(vgpu->vdev.mdev));
vgpu              572 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (!vgpu->vdev.vfio_device) {
vgpu              582 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu;
vgpu              590 drivers/gpu/drm/i915/gvt/kvmgt.c 	base = vgpu_opregion(vgpu)->va;
vgpu              599 drivers/gpu/drm/i915/gvt/kvmgt.c 	ret = intel_vgpu_register_reg(vgpu,
vgpu              610 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu;
vgpu              611 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu_port *port = intel_vgpu_port(vgpu, port_num);
vgpu              627 drivers/gpu/drm/i915/gvt/kvmgt.c 	ret = intel_vgpu_register_reg(vgpu,
vgpu              638 drivers/gpu/drm/i915/gvt/kvmgt.c static void kvmgt_put_vfio_device(void *vgpu)
vgpu              640 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (WARN_ON(!((struct intel_vgpu *)vgpu)->vdev.vfio_device))
vgpu              643 drivers/gpu/drm/i915/gvt/kvmgt.c 	vfio_device_put(((struct intel_vgpu *)vgpu)->vdev.vfio_device);
vgpu              648 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = NULL;
vgpu              665 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu = intel_gvt_ops->vgpu_create(gvt, type);
vgpu              666 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (IS_ERR_OR_NULL(vgpu)) {
vgpu              667 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = vgpu == NULL ? -EFAULT : PTR_ERR(vgpu);
vgpu              672 drivers/gpu/drm/i915/gvt/kvmgt.c 	INIT_WORK(&vgpu->vdev.release_work, intel_vgpu_release_work);
vgpu              674 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.mdev = mdev;
vgpu              675 drivers/gpu/drm/i915/gvt/kvmgt.c 	mdev_set_drvdata(mdev, vgpu);
vgpu              687 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu              689 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (handle_valid(vgpu->handle))
vgpu              692 drivers/gpu/drm/i915/gvt/kvmgt.c 	intel_gvt_ops->vgpu_destroy(vgpu);
vgpu              699 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = container_of(nb,
vgpu              711 drivers/gpu/drm/i915/gvt/kvmgt.c 		mutex_lock(&vgpu->vdev.cache_lock);
vgpu              713 drivers/gpu/drm/i915/gvt/kvmgt.c 			entry = __gvt_cache_find_gfn(vgpu, iov_pfn);
vgpu              717 drivers/gpu/drm/i915/gvt/kvmgt.c 			gvt_dma_unmap_page(vgpu, entry->gfn, entry->dma_addr,
vgpu              719 drivers/gpu/drm/i915/gvt/kvmgt.c 			__gvt_cache_remove_entry(vgpu, entry);
vgpu              721 drivers/gpu/drm/i915/gvt/kvmgt.c 		mutex_unlock(&vgpu->vdev.cache_lock);
vgpu              730 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = container_of(nb,
vgpu              736 drivers/gpu/drm/i915/gvt/kvmgt.c 		vgpu->vdev.kvm = data;
vgpu              739 drivers/gpu/drm/i915/gvt/kvmgt.c 			schedule_work(&vgpu->vdev.release_work);
vgpu              747 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu              751 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.iommu_notifier.notifier_call = intel_vgpu_iommu_notifier;
vgpu              752 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.group_notifier.notifier_call = intel_vgpu_group_notifier;
vgpu              756 drivers/gpu/drm/i915/gvt/kvmgt.c 				&vgpu->vdev.iommu_notifier);
vgpu              765 drivers/gpu/drm/i915/gvt/kvmgt.c 				&vgpu->vdev.group_notifier);
vgpu              782 drivers/gpu/drm/i915/gvt/kvmgt.c 	intel_gvt_ops->vgpu_activate(vgpu);
vgpu              784 drivers/gpu/drm/i915/gvt/kvmgt.c 	atomic_set(&vgpu->vdev.released, 0);
vgpu              789 drivers/gpu/drm/i915/gvt/kvmgt.c 					&vgpu->vdev.group_notifier);
vgpu              793 drivers/gpu/drm/i915/gvt/kvmgt.c 					&vgpu->vdev.iommu_notifier);
vgpu              798 drivers/gpu/drm/i915/gvt/kvmgt.c static void intel_vgpu_release_msi_eventfd_ctx(struct intel_vgpu *vgpu)
vgpu              802 drivers/gpu/drm/i915/gvt/kvmgt.c 	trigger = vgpu->vdev.msi_trigger;
vgpu              805 drivers/gpu/drm/i915/gvt/kvmgt.c 		vgpu->vdev.msi_trigger = NULL;
vgpu              809 drivers/gpu/drm/i915/gvt/kvmgt.c static void __intel_vgpu_release(struct intel_vgpu *vgpu)
vgpu              814 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (!handle_valid(vgpu->handle))
vgpu              817 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (atomic_cmpxchg(&vgpu->vdev.released, 0, 1))
vgpu              820 drivers/gpu/drm/i915/gvt/kvmgt.c 	intel_gvt_ops->vgpu_release(vgpu);
vgpu              822 drivers/gpu/drm/i915/gvt/kvmgt.c 	ret = vfio_unregister_notifier(mdev_dev(vgpu->vdev.mdev), VFIO_IOMMU_NOTIFY,
vgpu              823 drivers/gpu/drm/i915/gvt/kvmgt.c 					&vgpu->vdev.iommu_notifier);
vgpu              826 drivers/gpu/drm/i915/gvt/kvmgt.c 	ret = vfio_unregister_notifier(mdev_dev(vgpu->vdev.mdev), VFIO_GROUP_NOTIFY,
vgpu              827 drivers/gpu/drm/i915/gvt/kvmgt.c 					&vgpu->vdev.group_notifier);
vgpu              833 drivers/gpu/drm/i915/gvt/kvmgt.c 	info = (struct kvmgt_guest_info *)vgpu->handle;
vgpu              836 drivers/gpu/drm/i915/gvt/kvmgt.c 	intel_vgpu_release_msi_eventfd_ctx(vgpu);
vgpu              838 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.kvm = NULL;
vgpu              839 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->handle = 0;
vgpu              844 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu              846 drivers/gpu/drm/i915/gvt/kvmgt.c 	__intel_vgpu_release(vgpu);
vgpu              851 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = container_of(work, struct intel_vgpu,
vgpu              854 drivers/gpu/drm/i915/gvt/kvmgt.c 	__intel_vgpu_release(vgpu);
vgpu              857 drivers/gpu/drm/i915/gvt/kvmgt.c static u64 intel_vgpu_get_bar_addr(struct intel_vgpu *vgpu, int bar)
vgpu              862 drivers/gpu/drm/i915/gvt/kvmgt.c 	start_lo = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
vgpu              864 drivers/gpu/drm/i915/gvt/kvmgt.c 	mem_type = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
vgpu              869 drivers/gpu/drm/i915/gvt/kvmgt.c 		start_hi = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space
vgpu              884 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_bar_rw(struct intel_vgpu *vgpu, int bar, u64 off,
vgpu              887 drivers/gpu/drm/i915/gvt/kvmgt.c 	u64 bar_start = intel_vgpu_get_bar_addr(vgpu, bar);
vgpu              891 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = intel_gvt_ops->emulate_mmio_write(vgpu,
vgpu              894 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = intel_gvt_ops->emulate_mmio_read(vgpu,
vgpu              899 drivers/gpu/drm/i915/gvt/kvmgt.c static inline bool intel_vgpu_in_aperture(struct intel_vgpu *vgpu, u64 off)
vgpu              901 drivers/gpu/drm/i915/gvt/kvmgt.c 	return off >= vgpu_aperture_offset(vgpu) &&
vgpu              902 drivers/gpu/drm/i915/gvt/kvmgt.c 	       off < vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu);
vgpu              905 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_aperture_rw(struct intel_vgpu *vgpu, u64 off,
vgpu              910 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (!intel_vgpu_in_aperture(vgpu, off) ||
vgpu              911 drivers/gpu/drm/i915/gvt/kvmgt.c 	    !intel_vgpu_in_aperture(vgpu, off + count)) {
vgpu              916 drivers/gpu/drm/i915/gvt/kvmgt.c 	aperture_va = io_mapping_map_wc(&vgpu->gvt->dev_priv->ggtt.iomap,
vgpu              935 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu              941 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (index >= VFIO_PCI_NUM_REGIONS + vgpu->vdev.num_regions) {
vgpu              949 drivers/gpu/drm/i915/gvt/kvmgt.c 			ret = intel_gvt_ops->emulate_cfg_write(vgpu, pos,
vgpu              952 drivers/gpu/drm/i915/gvt/kvmgt.c 			ret = intel_gvt_ops->emulate_cfg_read(vgpu, pos,
vgpu              956 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = intel_vgpu_bar_rw(vgpu, PCI_BASE_ADDRESS_0, pos,
vgpu              960 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = intel_vgpu_aperture_rw(vgpu, pos, buf, count, is_write);
vgpu              970 drivers/gpu/drm/i915/gvt/kvmgt.c 		if (index >= VFIO_PCI_NUM_REGIONS + vgpu->vdev.num_regions)
vgpu              974 drivers/gpu/drm/i915/gvt/kvmgt.c 		return vgpu->vdev.region[index].ops->rw(vgpu, buf, count,
vgpu              983 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu              985 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              993 drivers/gpu/drm/i915/gvt/kvmgt.c 		intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_0);
vgpu             1152 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu             1172 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (!intel_vgpu_in_aperture(vgpu, req_start))
vgpu             1175 drivers/gpu/drm/i915/gvt/kvmgt.c 	    vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu))
vgpu             1178 drivers/gpu/drm/i915/gvt/kvmgt.c 	pgoff = (gvt_aperture_pa_base(vgpu->gvt) >> PAGE_SHIFT) + pgoff;
vgpu             1183 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_get_irq_count(struct intel_vgpu *vgpu, int type)
vgpu             1191 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_set_intx_mask(struct intel_vgpu *vgpu,
vgpu             1199 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_set_intx_unmask(struct intel_vgpu *vgpu,
vgpu             1206 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_set_intx_trigger(struct intel_vgpu *vgpu,
vgpu             1213 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_set_msi_trigger(struct intel_vgpu *vgpu,
vgpu             1227 drivers/gpu/drm/i915/gvt/kvmgt.c 		vgpu->vdev.msi_trigger = trigger;
vgpu             1229 drivers/gpu/drm/i915/gvt/kvmgt.c 		intel_vgpu_release_msi_eventfd_ctx(vgpu);
vgpu             1234 drivers/gpu/drm/i915/gvt/kvmgt.c static int intel_vgpu_set_irqs(struct intel_vgpu *vgpu, u32 flags,
vgpu             1238 drivers/gpu/drm/i915/gvt/kvmgt.c 	int (*func)(struct intel_vgpu *vgpu, unsigned int index,
vgpu             1272 drivers/gpu/drm/i915/gvt/kvmgt.c 	return func(vgpu, index, start, count, flags, data);
vgpu             1278 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
vgpu             1281 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_dbg_core("vgpu%d ioctl, cmd: %d\n", vgpu->id, cmd);
vgpu             1297 drivers/gpu/drm/i915/gvt/kvmgt.c 				vgpu->vdev.num_regions;
vgpu             1323 drivers/gpu/drm/i915/gvt/kvmgt.c 			info.size = vgpu->gvt->device_info.cfg_space_size;
vgpu             1329 drivers/gpu/drm/i915/gvt/kvmgt.c 			info.size = vgpu->cfg_space.bar[info.index].size;
vgpu             1349 drivers/gpu/drm/i915/gvt/kvmgt.c 			info.size = gvt_aperture_sz(vgpu->gvt);
vgpu             1361 drivers/gpu/drm/i915/gvt/kvmgt.c 					PAGE_ALIGN(vgpu_aperture_offset(vgpu));
vgpu             1362 drivers/gpu/drm/i915/gvt/kvmgt.c 			sparse->areas[0].size = vgpu_aperture_sz(vgpu);
vgpu             1388 drivers/gpu/drm/i915/gvt/kvmgt.c 						vgpu->vdev.num_regions)
vgpu             1393 drivers/gpu/drm/i915/gvt/kvmgt.c 							vgpu->vdev.num_regions);
vgpu             1399 drivers/gpu/drm/i915/gvt/kvmgt.c 				info.size = vgpu->vdev.region[i].size;
vgpu             1400 drivers/gpu/drm/i915/gvt/kvmgt.c 				info.flags = vgpu->vdev.region[i].flags;
vgpu             1402 drivers/gpu/drm/i915/gvt/kvmgt.c 				cap_type.type = vgpu->vdev.region[i].type;
vgpu             1403 drivers/gpu/drm/i915/gvt/kvmgt.c 				cap_type.subtype = vgpu->vdev.region[i].subtype;
vgpu             1475 drivers/gpu/drm/i915/gvt/kvmgt.c 		info.count = intel_vgpu_get_irq_count(vgpu, info.index);
vgpu             1497 drivers/gpu/drm/i915/gvt/kvmgt.c 			int max = intel_vgpu_get_irq_count(vgpu, hdr.index);
vgpu             1513 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = intel_vgpu_set_irqs(vgpu, hdr.flags, hdr.index,
vgpu             1519 drivers/gpu/drm/i915/gvt/kvmgt.c 		intel_gvt_ops->vgpu_reset(vgpu);
vgpu             1532 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = intel_gvt_ops->vgpu_query_plane(vgpu, &dmabuf);
vgpu             1545 drivers/gpu/drm/i915/gvt/kvmgt.c 		dmabuf_fd = intel_gvt_ops->vgpu_get_dmabuf(vgpu, dmabuf_id);
vgpu             1560 drivers/gpu/drm/i915/gvt/kvmgt.c 		struct intel_vgpu *vgpu = (struct intel_vgpu *)
vgpu             1562 drivers/gpu/drm/i915/gvt/kvmgt.c 		return sprintf(buf, "%d\n", vgpu->id);
vgpu             1574 drivers/gpu/drm/i915/gvt/kvmgt.c 		struct intel_vgpu *vgpu = (struct intel_vgpu *)
vgpu             1577 drivers/gpu/drm/i915/gvt/kvmgt.c 			       vgpu->submission.shadow[0]->gem_context->hw_id);
vgpu             1710 drivers/gpu/drm/i915/gvt/kvmgt.c 		intel_gvt_ops->write_protect_handler(info->vgpu, gpa,
vgpu             1735 drivers/gpu/drm/i915/gvt/kvmgt.c static bool __kvmgt_vgpu_exist(struct intel_vgpu *vgpu, struct kvm *kvm)
vgpu             1742 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_lock(&vgpu->gvt->lock);
vgpu             1743 drivers/gpu/drm/i915/gvt/kvmgt.c 	for_each_active_vgpu(vgpu->gvt, itr, id) {
vgpu             1754 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_unlock(&vgpu->gvt->lock);
vgpu             1761 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu;
vgpu             1764 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu = mdev_get_drvdata(mdev);
vgpu             1765 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (handle_valid(vgpu->handle))
vgpu             1768 drivers/gpu/drm/i915/gvt/kvmgt.c 	kvm = vgpu->vdev.kvm;
vgpu             1774 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (__kvmgt_vgpu_exist(vgpu, kvm))
vgpu             1781 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->handle = (unsigned long)info;
vgpu             1782 drivers/gpu/drm/i915/gvt/kvmgt.c 	info->vgpu = vgpu;
vgpu             1787 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_cache_init(vgpu);
vgpu             1789 drivers/gpu/drm/i915/gvt/kvmgt.c 	init_completion(&vgpu->vblank_done);
vgpu             1797 drivers/gpu/drm/i915/gvt/kvmgt.c 						0444, vgpu->debugfs,
vgpu             1798 drivers/gpu/drm/i915/gvt/kvmgt.c 						&vgpu->vdev.nr_cache_entries);
vgpu             1809 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_cache_destroy(info->vgpu);
vgpu             1815 drivers/gpu/drm/i915/gvt/kvmgt.c static int kvmgt_attach_vgpu(void *vgpu, unsigned long *handle)
vgpu             1824 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu;
vgpu             1826 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (!vgpu->vdev.region)
vgpu             1829 drivers/gpu/drm/i915/gvt/kvmgt.c 	for (i = 0; i < vgpu->vdev.num_regions; i++)
vgpu             1830 drivers/gpu/drm/i915/gvt/kvmgt.c 		if (vgpu->vdev.region[i].ops->release)
vgpu             1831 drivers/gpu/drm/i915/gvt/kvmgt.c 			vgpu->vdev.region[i].ops->release(vgpu,
vgpu             1832 drivers/gpu/drm/i915/gvt/kvmgt.c 					&vgpu->vdev.region[i]);
vgpu             1833 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.num_regions = 0;
vgpu             1834 drivers/gpu/drm/i915/gvt/kvmgt.c 	kfree(vgpu->vdev.region);
vgpu             1835 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu->vdev.region = NULL;
vgpu             1841 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu;
vgpu             1847 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu = info->vgpu;
vgpu             1858 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (vgpu->vdev.msi_trigger == NULL)
vgpu             1861 drivers/gpu/drm/i915/gvt/kvmgt.c 	if (eventfd_signal(vgpu->vdev.msi_trigger, 1) == 1)
vgpu             1888 drivers/gpu/drm/i915/gvt/kvmgt.c 	struct intel_vgpu *vgpu;
vgpu             1896 drivers/gpu/drm/i915/gvt/kvmgt.c 	vgpu = info->vgpu;
vgpu             1898 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_lock(&info->vgpu->vdev.cache_lock);
vgpu             1900 drivers/gpu/drm/i915/gvt/kvmgt.c 	entry = __gvt_cache_find_gfn(info->vgpu, gfn);
vgpu             1902 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size);
vgpu             1906 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = __gvt_cache_add(info->vgpu, gfn, *dma_addr, size);
vgpu             1911 drivers/gpu/drm/i915/gvt/kvmgt.c 		gvt_dma_unmap_page(vgpu, gfn, entry->dma_addr, entry->size);
vgpu             1912 drivers/gpu/drm/i915/gvt/kvmgt.c 		__gvt_cache_remove_entry(vgpu, entry);
vgpu             1914 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size);
vgpu             1918 drivers/gpu/drm/i915/gvt/kvmgt.c 		ret = __gvt_cache_add(info->vgpu, gfn, *dma_addr, size);
vgpu             1926 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_unlock(&info->vgpu->vdev.cache_lock);
vgpu             1930 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_dma_unmap_page(vgpu, gfn, *dma_addr, size);
vgpu             1932 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_unlock(&info->vgpu->vdev.cache_lock);
vgpu             1940 drivers/gpu/drm/i915/gvt/kvmgt.c 	gvt_dma_unmap_page(entry->vgpu, entry->gfn, entry->dma_addr,
vgpu             1942 drivers/gpu/drm/i915/gvt/kvmgt.c 	__gvt_cache_remove_entry(entry->vgpu, entry);
vgpu             1955 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_lock(&info->vgpu->vdev.cache_lock);
vgpu             1956 drivers/gpu/drm/i915/gvt/kvmgt.c 	entry = __gvt_cache_find_dma_addr(info->vgpu, dma_addr);
vgpu             1959 drivers/gpu/drm/i915/gvt/kvmgt.c 	mutex_unlock(&info->vgpu->vdev.cache_lock);
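The kvmgt.c lines above revolve around a per-vGPU cache mapping guest frame numbers (gfn) to DMA addresses under vdev.cache_lock: look up by gfn, reuse a matching entry, remap when the size changed, insert on a miss. Below is a minimal userspace sketch of that lookup-or-map shape; struct dma_cache_entry, cache_map_gfn and map_page are simplified stand-ins, not the driver's real types or helpers.

/* Simplified stand-in for the driver's gfn -> dma_addr cache. */
#include <pthread.h>
#include <stdint.h>
#include <stdlib.h>

struct dma_cache_entry {
	unsigned long gfn;
	uint64_t dma_addr;
	unsigned long size;
	struct dma_cache_entry *next;
};

static struct dma_cache_entry *cache_head;
static pthread_mutex_t cache_lock = PTHREAD_MUTEX_INITIALIZER;

static struct dma_cache_entry *cache_find_gfn(unsigned long gfn)
{
	struct dma_cache_entry *e;

	for (e = cache_head; e; e = e->next)
		if (e->gfn == gfn)
			return e;
	return NULL;
}

/* Return a cached mapping for gfn, creating one with map_page() if needed. */
static int cache_map_gfn(unsigned long gfn, unsigned long size,
			 uint64_t (*map_page)(unsigned long, unsigned long),
			 uint64_t *dma_addr)
{
	struct dma_cache_entry *e;
	int ret = 0;

	pthread_mutex_lock(&cache_lock);
	e = cache_find_gfn(gfn);
	if (e) {
		if (e->size != size) {
			/* stale mapping: the real code unmaps and remaps here */
			e->size = size;
			e->dma_addr = map_page(gfn, size);
		}
		*dma_addr = e->dma_addr;	/* hit: reuse the mapping */
		goto out;
	}
	e = calloc(1, sizeof(*e));
	if (!e) {
		ret = -1;
		goto out;
	}
	e->gfn = gfn;
	e->size = size;
	e->dma_addr = map_page(gfn, size);
	e->next = cache_head;
	cache_head = e;
	*dma_addr = e->dma_addr;
out:
	pthread_mutex_unlock(&cache_lock);
	return ret;
}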
vgpu               47 drivers/gpu/drm/i915/gvt/mmio.c int intel_vgpu_gpa_to_mmio_offset(struct intel_vgpu *vgpu, u64 gpa)
vgpu               49 drivers/gpu/drm/i915/gvt/mmio.c 	u64 gttmmio_gpa = intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_0);
vgpu               60 drivers/gpu/drm/i915/gvt/mmio.c static void failsafe_emulate_mmio_rw(struct intel_vgpu *vgpu, u64 pa,
vgpu               67 drivers/gpu/drm/i915/gvt/mmio.c 	if (!vgpu || !p_data)
vgpu               70 drivers/gpu/drm/i915/gvt/mmio.c 	gvt = vgpu->gvt;
vgpu               71 drivers/gpu/drm/i915/gvt/mmio.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu               72 drivers/gpu/drm/i915/gvt/mmio.c 	offset = intel_vgpu_gpa_to_mmio_offset(vgpu, pa);
vgpu               75 drivers/gpu/drm/i915/gvt/mmio.c 			intel_vgpu_default_mmio_read(vgpu, offset, p_data,
vgpu               78 drivers/gpu/drm/i915/gvt/mmio.c 			intel_vgpu_default_mmio_write(vgpu, offset, p_data,
vgpu               82 drivers/gpu/drm/i915/gvt/mmio.c 		pt = vgpu->gtt.ggtt_mm->ggtt_mm.virtual_ggtt + offset;
vgpu               89 drivers/gpu/drm/i915/gvt/mmio.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              102 drivers/gpu/drm/i915/gvt/mmio.c int intel_vgpu_emulate_mmio_read(struct intel_vgpu *vgpu, u64 pa,
vgpu              105 drivers/gpu/drm/i915/gvt/mmio.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              109 drivers/gpu/drm/i915/gvt/mmio.c 	if (vgpu->failsafe) {
vgpu              110 drivers/gpu/drm/i915/gvt/mmio.c 		failsafe_emulate_mmio_rw(vgpu, pa, p_data, bytes, true);
vgpu              113 drivers/gpu/drm/i915/gvt/mmio.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              115 drivers/gpu/drm/i915/gvt/mmio.c 	offset = intel_vgpu_gpa_to_mmio_offset(vgpu, pa);
vgpu              128 drivers/gpu/drm/i915/gvt/mmio.c 		ret = intel_vgpu_emulate_ggtt_mmio_read(vgpu, offset,
vgpu              136 drivers/gpu/drm/i915/gvt/mmio.c 		ret = intel_gvt_hypervisor_read_gpa(vgpu, pa, p_data, bytes);
vgpu              148 drivers/gpu/drm/i915/gvt/mmio.c 	ret = intel_vgpu_mmio_reg_rw(vgpu, offset, p_data, bytes, true);
vgpu              160 drivers/gpu/drm/i915/gvt/mmio.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              174 drivers/gpu/drm/i915/gvt/mmio.c int intel_vgpu_emulate_mmio_write(struct intel_vgpu *vgpu, u64 pa,
vgpu              177 drivers/gpu/drm/i915/gvt/mmio.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              181 drivers/gpu/drm/i915/gvt/mmio.c 	if (vgpu->failsafe) {
vgpu              182 drivers/gpu/drm/i915/gvt/mmio.c 		failsafe_emulate_mmio_rw(vgpu, pa, p_data, bytes, false);
vgpu              186 drivers/gpu/drm/i915/gvt/mmio.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              188 drivers/gpu/drm/i915/gvt/mmio.c 	offset = intel_vgpu_gpa_to_mmio_offset(vgpu, pa);
vgpu              201 drivers/gpu/drm/i915/gvt/mmio.c 		ret = intel_vgpu_emulate_ggtt_mmio_write(vgpu, offset,
vgpu              209 drivers/gpu/drm/i915/gvt/mmio.c 		ret = intel_gvt_hypervisor_write_gpa(vgpu, pa, p_data, bytes);
vgpu              213 drivers/gpu/drm/i915/gvt/mmio.c 	ret = intel_vgpu_mmio_reg_rw(vgpu, offset, p_data, bytes, false);
vgpu              224 drivers/gpu/drm/i915/gvt/mmio.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              234 drivers/gpu/drm/i915/gvt/mmio.c void intel_vgpu_reset_mmio(struct intel_vgpu *vgpu, bool dmlr)
vgpu              236 drivers/gpu/drm/i915/gvt/mmio.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              241 drivers/gpu/drm/i915/gvt/mmio.c 		memcpy(vgpu->mmio.vreg, mmio, info->mmio_size);
vgpu              243 drivers/gpu/drm/i915/gvt/mmio.c 		vgpu_vreg_t(vgpu, GEN6_GT_THREAD_STATUS_REG) = 0;
vgpu              246 drivers/gpu/drm/i915/gvt/mmio.c 		vgpu_vreg_t(vgpu, GEN6_GT_CORE_STATUS) = 0;
vgpu              248 drivers/gpu/drm/i915/gvt/mmio.c 		if (IS_BROXTON(vgpu->gvt->dev_priv)) {
vgpu              249 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_P_CR_GT_DISP_PWRON) &=
vgpu              251 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) &=
vgpu              253 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) &=
vgpu              255 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL_FAMILY(DPIO_PHY0)) &=
vgpu              257 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL_FAMILY(DPIO_PHY1)) &=
vgpu              259 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL(PORT_A)) &=
vgpu              261 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL(PORT_A)) |=
vgpu              264 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL(PORT_B)) &=
vgpu              266 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL(PORT_B)) |=
vgpu              269 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL(PORT_C)) &=
vgpu              271 drivers/gpu/drm/i915/gvt/mmio.c 			vgpu_vreg_t(vgpu, BXT_PHY_CTL(PORT_C)) |=
vgpu              281 drivers/gpu/drm/i915/gvt/mmio.c 		memcpy(vgpu->mmio.vreg, mmio, GVT_GEN8_MMIO_RESET_OFFSET);
vgpu              293 drivers/gpu/drm/i915/gvt/mmio.c int intel_vgpu_init_mmio(struct intel_vgpu *vgpu)
vgpu              295 drivers/gpu/drm/i915/gvt/mmio.c 	const struct intel_gvt_device_info *info = &vgpu->gvt->device_info;
vgpu              297 drivers/gpu/drm/i915/gvt/mmio.c 	vgpu->mmio.vreg = vzalloc(info->mmio_size);
vgpu              298 drivers/gpu/drm/i915/gvt/mmio.c 	if (!vgpu->mmio.vreg)
vgpu              301 drivers/gpu/drm/i915/gvt/mmio.c 	intel_vgpu_reset_mmio(vgpu, true);
vgpu              311 drivers/gpu/drm/i915/gvt/mmio.c void intel_vgpu_clean_mmio(struct intel_vgpu *vgpu)
vgpu              313 drivers/gpu/drm/i915/gvt/mmio.c 	vfree(vgpu->mmio.vreg);
vgpu              314 drivers/gpu/drm/i915/gvt/mmio.c 	vgpu->mmio.vreg = NULL;
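Before dispatching to a handler, the mmio.c paths above convert a guest physical address into an offset within the virtual MMIO BAR (BAR0) and fall back to plain GPA access when the address lies outside it. A small sketch of that address math, assuming bar_gpa and bar_size describe the emulated PCI_BASE_ADDRESS_0 region (both parameters here are stand-ins):

#include <stdint.h>

/* Return the offset into the MMIO BAR, or -1 if gpa falls outside it. */
static int64_t gpa_to_mmio_offset(uint64_t gpa, uint64_t bar_gpa,
				  uint64_t bar_size)
{
	if (gpa < bar_gpa || gpa >= bar_gpa + bar_size)
		return -1;	/* not an MMIO access; caller uses plain GPA r/w */
	return (int64_t)(gpa - bar_gpa);
}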
vgpu               81 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_init_mmio(struct intel_vgpu *vgpu);
vgpu               82 drivers/gpu/drm/i915/gvt/mmio.h void intel_vgpu_reset_mmio(struct intel_vgpu *vgpu, bool dmlr);
vgpu               83 drivers/gpu/drm/i915/gvt/mmio.h void intel_vgpu_clean_mmio(struct intel_vgpu *vgpu);
vgpu               85 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_gpa_to_mmio_offset(struct intel_vgpu *vgpu, u64 gpa);
vgpu               87 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_emulate_mmio_read(struct intel_vgpu *vgpu, u64 pa,
vgpu               89 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_emulate_mmio_write(struct intel_vgpu *vgpu, u64 pa,
vgpu               92 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
vgpu               94 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              100 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              103 drivers/gpu/drm/i915/gvt/mmio.h int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
vgpu              192 drivers/gpu/drm/i915/gvt/mmio_context.c restore_context_mmio_for_inhibit(struct intel_vgpu *vgpu,
vgpu              198 drivers/gpu/drm/i915/gvt/mmio_context.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              221 drivers/gpu/drm/i915/gvt/mmio_context.c 		*cs++ = vgpu_vreg_t(vgpu, mmio->reg) |
vgpu              224 drivers/gpu/drm/i915/gvt/mmio_context.c 			      *(cs-2), *(cs-1), vgpu->id, ring_id);
vgpu              238 drivers/gpu/drm/i915/gvt/mmio_context.c restore_render_mocs_control_for_inhibit(struct intel_vgpu *vgpu,
vgpu              252 drivers/gpu/drm/i915/gvt/mmio_context.c 		*cs++ = vgpu_vreg_t(vgpu, GEN9_GFX_MOCS(index));
vgpu              254 drivers/gpu/drm/i915/gvt/mmio_context.c 			      *(cs-2), *(cs-1), vgpu->id, req->engine->id);
vgpu              265 drivers/gpu/drm/i915/gvt/mmio_context.c restore_render_mocs_l3cc_for_inhibit(struct intel_vgpu *vgpu,
vgpu              279 drivers/gpu/drm/i915/gvt/mmio_context.c 		*cs++ = vgpu_vreg_t(vgpu, GEN9_LNCFCMOCS(index));
vgpu              281 drivers/gpu/drm/i915/gvt/mmio_context.c 			      *(cs-2), *(cs-1), vgpu->id, req->engine->id);
vgpu              296 drivers/gpu/drm/i915/gvt/mmio_context.c int intel_vgpu_restore_inhibit_context(struct intel_vgpu *vgpu,
vgpu              310 drivers/gpu/drm/i915/gvt/mmio_context.c 	ret = restore_context_mmio_for_inhibit(vgpu, req);
vgpu              318 drivers/gpu/drm/i915/gvt/mmio_context.c 	ret = restore_render_mocs_control_for_inhibit(vgpu, req);
vgpu              322 drivers/gpu/drm/i915/gvt/mmio_context.c 	ret = restore_render_mocs_l3cc_for_inhibit(vgpu, req);
vgpu              346 drivers/gpu/drm/i915/gvt/mmio_context.c static void handle_tlb_pending_event(struct intel_vgpu *vgpu, int ring_id)
vgpu              348 drivers/gpu/drm/i915/gvt/mmio_context.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              350 drivers/gpu/drm/i915/gvt/mmio_context.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              351 drivers/gpu/drm/i915/gvt/mmio_context.c 	u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list;
vgpu              352 drivers/gpu/drm/i915/gvt/mmio_context.c 	u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt;
vgpu              384 drivers/gpu/drm/i915/gvt/mmio_context.c 		vgpu_vreg_t(vgpu, reg) = 0;
vgpu               54 drivers/gpu/drm/i915/gvt/mmio_context.h int intel_vgpu_restore_inhibit_context(struct intel_vgpu *vgpu,
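The mmio_context.c lines above restore an inhibited context by emitting register/value pairs into the request's command stream so the hardware reloads the vGPU's saved vreg values on the next submission. A sketch of packing such (offset, value) pairs; FAKE_MI_LRI is an illustrative opcode constant, not the real MI_LOAD_REGISTER_IMM encoding.

#include <stdint.h>

/* Illustrative "load register immediate" header: opcode plus dword count. */
#define FAKE_MI_LRI(count)	(0x22000000u | (2u * (count) - 1u))

static uint32_t *emit_lri(uint32_t *cs, const uint32_t *offsets,
			  const uint32_t *values, int count)
{
	*cs++ = FAKE_MI_LRI(count);
	for (int i = 0; i < count; i++) {
		*cs++ = offsets[i];	/* register offset */
		*cs++ = values[i];	/* value captured in the vGPU's vreg */
	}
	return cs;			/* caller continues writing after us */
}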
vgpu               80 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_attach_vgpu(struct intel_vgpu *vgpu)
vgpu               86 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->attach_vgpu(vgpu, &vgpu->handle);
vgpu               96 drivers/gpu/drm/i915/gvt/mpt.h static inline void intel_gvt_hypervisor_detach_vgpu(struct intel_vgpu *vgpu)
vgpu              102 drivers/gpu/drm/i915/gvt/mpt.h 	intel_gvt_host.mpt->detach_vgpu(vgpu);
vgpu              116 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_inject_msi(struct intel_vgpu *vgpu)
vgpu              118 drivers/gpu/drm/i915/gvt/mpt.h 	unsigned long offset = vgpu->gvt->device_info.msi_cap_offset;
vgpu              123 drivers/gpu/drm/i915/gvt/mpt.h 	control = *(u16 *)(vgpu_cfg_space(vgpu) + MSI_CAP_CONTROL(offset));
vgpu              124 drivers/gpu/drm/i915/gvt/mpt.h 	addr = *(u32 *)(vgpu_cfg_space(vgpu) + MSI_CAP_ADDRESS(offset));
vgpu              125 drivers/gpu/drm/i915/gvt/mpt.h 	data = *(u16 *)(vgpu_cfg_space(vgpu) + MSI_CAP_DATA(offset));
vgpu              134 drivers/gpu/drm/i915/gvt/mpt.h 	trace_inject_msi(vgpu->id, addr, data);
vgpu              136 drivers/gpu/drm/i915/gvt/mpt.h 	ret = intel_gvt_host.mpt->inject_msi(vgpu->handle, addr, data);
vgpu              163 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              165 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->enable_page_track(vgpu->handle, gfn);
vgpu              177 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              179 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->disable_page_track(vgpu->handle, gfn);
vgpu              192 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_read_gpa(struct intel_vgpu *vgpu,
vgpu              195 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->read_gpa(vgpu->handle, gpa, buf, len);
vgpu              208 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_write_gpa(struct intel_vgpu *vgpu,
vgpu              211 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->write_gpa(vgpu->handle, gpa, buf, len);
vgpu              223 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              225 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->gfn_to_mfn(vgpu->handle, gfn);
vgpu              239 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, unsigned long gfn, unsigned long size,
vgpu              242 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->dma_map_guest_page(vgpu->handle, gfn, size,
vgpu              252 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, dma_addr_t dma_addr)
vgpu              254 drivers/gpu/drm/i915/gvt/mpt.h 	intel_gvt_host.mpt->dma_unmap_guest_page(vgpu->handle, dma_addr);
vgpu              269 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, unsigned long gfn,
vgpu              277 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->map_gfn_to_mfn(vgpu->handle, gfn, mfn, nr,
vgpu              292 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, u64 start, u64 end, bool map)
vgpu              298 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->set_trap_area(vgpu->handle, start, end, map);
vgpu              308 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_set_opregion(struct intel_vgpu *vgpu)
vgpu              313 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->set_opregion(vgpu);
vgpu              324 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_set_edid(struct intel_vgpu *vgpu,
vgpu              330 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->set_edid(vgpu, port_num);
vgpu              340 drivers/gpu/drm/i915/gvt/mpt.h static inline int intel_gvt_hypervisor_get_vfio_device(struct intel_vgpu *vgpu)
vgpu              345 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->get_vfio_device(vgpu);
vgpu              355 drivers/gpu/drm/i915/gvt/mpt.h static inline void intel_gvt_hypervisor_put_vfio_device(struct intel_vgpu *vgpu)
vgpu              360 drivers/gpu/drm/i915/gvt/mpt.h 	intel_gvt_host.mpt->put_vfio_device(vgpu);
vgpu              372 drivers/gpu/drm/i915/gvt/mpt.h 		struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              377 drivers/gpu/drm/i915/gvt/mpt.h 	return intel_gvt_host.mpt->is_valid_gfn(vgpu->handle, gfn);
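mpt.h is a layer of inline wrappers that forward to a hypervisor ops table (intel_gvt_host.mpt->...), usually passing vgpu->handle. A minimal sketch of that indirection with a wrapper that tolerates an unimplemented hook; struct hyper_ops and hyper_read_gpa are hypothetical names, not the driver's.

#include <stddef.h>

struct hyper_ops {
	int (*read_gpa)(unsigned long handle, unsigned long gpa,
			void *buf, unsigned long len);
	int (*write_gpa)(unsigned long handle, unsigned long gpa,
			 void *buf, unsigned long len);
};

static const struct hyper_ops *mpt;	/* registered once by the backend */

static inline int hyper_read_gpa(unsigned long handle, unsigned long gpa,
				 void *buf, unsigned long len)
{
	if (!mpt || !mpt->read_gpa)
		return -1;		/* backend did not provide this hook */
	return mpt->read_gpa(handle, gpa, buf, len);
}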
vgpu              221 drivers/gpu/drm/i915/gvt/opregion.c int intel_vgpu_init_opregion(struct intel_vgpu *vgpu)
vgpu              228 drivers/gpu/drm/i915/gvt/opregion.c 	gvt_dbg_core("init vgpu%d opregion\n", vgpu->id);
vgpu              229 drivers/gpu/drm/i915/gvt/opregion.c 	vgpu_opregion(vgpu)->va = (void *)__get_free_pages(GFP_KERNEL |
vgpu              232 drivers/gpu/drm/i915/gvt/opregion.c 	if (!vgpu_opregion(vgpu)->va) {
vgpu              238 drivers/gpu/drm/i915/gvt/opregion.c 	buf = (u8 *)vgpu_opregion(vgpu)->va;
vgpu              259 drivers/gpu/drm/i915/gvt/opregion.c static int map_vgpu_opregion(struct intel_vgpu *vgpu, bool map)
vgpu              265 drivers/gpu/drm/i915/gvt/opregion.c 		mfn = intel_gvt_hypervisor_virt_to_mfn(vgpu_opregion(vgpu)->va
vgpu              271 drivers/gpu/drm/i915/gvt/opregion.c 		ret = intel_gvt_hypervisor_map_gfn_to_mfn(vgpu,
vgpu              272 drivers/gpu/drm/i915/gvt/opregion.c 				vgpu_opregion(vgpu)->gfn[i],
vgpu              281 drivers/gpu/drm/i915/gvt/opregion.c 	vgpu_opregion(vgpu)->mapped = map;
vgpu              295 drivers/gpu/drm/i915/gvt/opregion.c int intel_vgpu_opregion_base_write_handler(struct intel_vgpu *vgpu, u32 gpa)
vgpu              305 drivers/gpu/drm/i915/gvt/opregion.c 			vgpu_opregion(vgpu)->gfn[i] = (gpa >> PAGE_SHIFT) + i;
vgpu              312 drivers/gpu/drm/i915/gvt/opregion.c 		if (vgpu_opregion(vgpu)->mapped)
vgpu              313 drivers/gpu/drm/i915/gvt/opregion.c 			map_vgpu_opregion(vgpu, false);
vgpu              316 drivers/gpu/drm/i915/gvt/opregion.c 			vgpu_opregion(vgpu)->gfn[i] = (gpa >> PAGE_SHIFT) + i;
vgpu              318 drivers/gpu/drm/i915/gvt/opregion.c 		ret = map_vgpu_opregion(vgpu, true);
vgpu              333 drivers/gpu/drm/i915/gvt/opregion.c void intel_vgpu_clean_opregion(struct intel_vgpu *vgpu)
vgpu              335 drivers/gpu/drm/i915/gvt/opregion.c 	gvt_dbg_core("vgpu%d: clean vgpu opregion\n", vgpu->id);
vgpu              337 drivers/gpu/drm/i915/gvt/opregion.c 	if (!vgpu_opregion(vgpu)->va)
vgpu              341 drivers/gpu/drm/i915/gvt/opregion.c 		if (vgpu_opregion(vgpu)->mapped)
vgpu              342 drivers/gpu/drm/i915/gvt/opregion.c 			map_vgpu_opregion(vgpu, false);
vgpu              346 drivers/gpu/drm/i915/gvt/opregion.c 	free_pages((unsigned long)vgpu_opregion(vgpu)->va,
vgpu              349 drivers/gpu/drm/i915/gvt/opregion.c 	vgpu_opregion(vgpu)->va = NULL;
vgpu              467 drivers/gpu/drm/i915/gvt/opregion.c int intel_vgpu_emulate_opregion_request(struct intel_vgpu *vgpu, u32 swsci)
vgpu              476 drivers/gpu/drm/i915/gvt/opregion.c 		scic = *((u32 *)vgpu_opregion(vgpu)->va +
vgpu              478 drivers/gpu/drm/i915/gvt/opregion.c 		parm = *((u32 *)vgpu_opregion(vgpu)->va +
vgpu              482 drivers/gpu/drm/i915/gvt/opregion.c 		scic_pa = (vgpu_opregion(vgpu)->gfn[0] << PAGE_SHIFT) +
vgpu              484 drivers/gpu/drm/i915/gvt/opregion.c 		parm_pa = (vgpu_opregion(vgpu)->gfn[0] << PAGE_SHIFT) +
vgpu              487 drivers/gpu/drm/i915/gvt/opregion.c 		ret = intel_gvt_hypervisor_read_gpa(vgpu, scic_pa,
vgpu              495 drivers/gpu/drm/i915/gvt/opregion.c 		ret = intel_gvt_hypervisor_read_gpa(vgpu, parm_pa,
vgpu              514 drivers/gpu/drm/i915/gvt/opregion.c 	if ((vgpu_cfg_space(vgpu)[INTEL_GVT_PCI_SWSCI]
vgpu              541 drivers/gpu/drm/i915/gvt/opregion.c 		*((u32 *)vgpu_opregion(vgpu)->va +
vgpu              543 drivers/gpu/drm/i915/gvt/opregion.c 		*((u32 *)vgpu_opregion(vgpu)->va +
vgpu              547 drivers/gpu/drm/i915/gvt/opregion.c 		ret = intel_gvt_hypervisor_write_gpa(vgpu, scic_pa,
vgpu              555 drivers/gpu/drm/i915/gvt/opregion.c 		ret = intel_gvt_hypervisor_write_gpa(vgpu, parm_pa,
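The SWSCI request handler above reads SCIC and PARM either from the host-side opregion copy or from guest memory at (gfn[0] << PAGE_SHIFT) plus the field's byte offset. A tiny sketch of that guest-physical-address derivation; PAGE_SHIFT_SW and the caller-supplied offsets are illustrative stand-ins.

#include <stdint.h>

#define PAGE_SHIFT_SW	12	/* 4 KiB pages assumed */

static uint64_t opregion_field_gpa(unsigned long first_gfn,
				   unsigned long field_offset)
{
	/* base GPA of the guest's opregion page plus the field's offset */
	return ((uint64_t)first_gfn << PAGE_SHIFT_SW) + field_offset;
}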
vgpu               35 drivers/gpu/drm/i915/gvt/page_track.c 		struct intel_vgpu *vgpu, unsigned long gfn)
vgpu               37 drivers/gpu/drm/i915/gvt/page_track.c 	return radix_tree_lookup(&vgpu->page_track_tree, gfn);
vgpu               50 drivers/gpu/drm/i915/gvt/page_track.c int intel_vgpu_register_page_track(struct intel_vgpu *vgpu, unsigned long gfn,
vgpu               56 drivers/gpu/drm/i915/gvt/page_track.c 	track = intel_vgpu_find_page_track(vgpu, gfn);
vgpu               67 drivers/gpu/drm/i915/gvt/page_track.c 	ret = radix_tree_insert(&vgpu->page_track_tree, gfn, track);
vgpu               82 drivers/gpu/drm/i915/gvt/page_track.c void intel_vgpu_unregister_page_track(struct intel_vgpu *vgpu,
vgpu               87 drivers/gpu/drm/i915/gvt/page_track.c 	track = radix_tree_delete(&vgpu->page_track_tree, gfn);
vgpu               90 drivers/gpu/drm/i915/gvt/page_track.c 			intel_gvt_hypervisor_disable_page_track(vgpu, gfn);
vgpu              103 drivers/gpu/drm/i915/gvt/page_track.c int intel_vgpu_enable_page_track(struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              108 drivers/gpu/drm/i915/gvt/page_track.c 	track = intel_vgpu_find_page_track(vgpu, gfn);
vgpu              115 drivers/gpu/drm/i915/gvt/page_track.c 	ret = intel_gvt_hypervisor_enable_page_track(vgpu, gfn);
vgpu              130 drivers/gpu/drm/i915/gvt/page_track.c int intel_vgpu_disable_page_track(struct intel_vgpu *vgpu, unsigned long gfn)
vgpu              135 drivers/gpu/drm/i915/gvt/page_track.c 	track = intel_vgpu_find_page_track(vgpu, gfn);
vgpu              142 drivers/gpu/drm/i915/gvt/page_track.c 	ret = intel_gvt_hypervisor_disable_page_track(vgpu, gfn);
vgpu              159 drivers/gpu/drm/i915/gvt/page_track.c int intel_vgpu_page_track_handler(struct intel_vgpu *vgpu, u64 gpa,
vgpu              165 drivers/gpu/drm/i915/gvt/page_track.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              167 drivers/gpu/drm/i915/gvt/page_track.c 	page_track = intel_vgpu_find_page_track(vgpu, gpa >> PAGE_SHIFT);
vgpu              173 drivers/gpu/drm/i915/gvt/page_track.c 	if (unlikely(vgpu->failsafe)) {
vgpu              175 drivers/gpu/drm/i915/gvt/page_track.c 		intel_vgpu_disable_page_track(vgpu, gpa >> PAGE_SHIFT);
vgpu              183 drivers/gpu/drm/i915/gvt/page_track.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu               42 drivers/gpu/drm/i915/gvt/page_track.h 		struct intel_vgpu *vgpu, unsigned long gfn);
vgpu               44 drivers/gpu/drm/i915/gvt/page_track.h int intel_vgpu_register_page_track(struct intel_vgpu *vgpu,
vgpu               47 drivers/gpu/drm/i915/gvt/page_track.h void intel_vgpu_unregister_page_track(struct intel_vgpu *vgpu,
vgpu               50 drivers/gpu/drm/i915/gvt/page_track.h int intel_vgpu_enable_page_track(struct intel_vgpu *vgpu, unsigned long gfn);
vgpu               51 drivers/gpu/drm/i915/gvt/page_track.h int intel_vgpu_disable_page_track(struct intel_vgpu *vgpu, unsigned long gfn);
vgpu               53 drivers/gpu/drm/i915/gvt/page_track.h int intel_vgpu_page_track_handler(struct intel_vgpu *vgpu, u64 gpa,
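page_track.c keys write-protected guest pages by gfn in a per-vGPU radix tree, each entry carrying a handler and private data that the write-protection fault path invokes. A compact sketch of the same register/find shape using a plain list instead of a radix tree; struct page_track and its fields here are simplified stand-ins.

#include <stdlib.h>

struct page_track {
	unsigned long gfn;
	int (*handler)(void *priv, unsigned long gpa, void *data, int bytes);
	void *priv;
	struct page_track *next;
};

static struct page_track *track_list;

static struct page_track *find_track(unsigned long gfn)
{
	struct page_track *t;

	for (t = track_list; t; t = t->next)
		if (t->gfn == gfn)
			return t;
	return NULL;
}

static int register_track(unsigned long gfn,
			  int (*handler)(void *, unsigned long, void *, int),
			  void *priv)
{
	struct page_track *t;

	if (find_track(gfn))
		return -1;	/* already tracked, mirrors the -EEXIST case */

	t = calloc(1, sizeof(*t));
	if (!t)
		return -1;
	t->gfn = gfn;
	t->handler = handler;
	t->priv = priv;
	t->next = track_list;
	track_list = t;
	return 0;
}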
vgpu               37 drivers/gpu/drm/i915/gvt/sched_policy.c static bool vgpu_has_pending_workload(struct intel_vgpu *vgpu)
vgpu               42 drivers/gpu/drm/i915/gvt/sched_policy.c 	for_each_engine(engine, vgpu->gvt->dev_priv, i) {
vgpu               43 drivers/gpu/drm/i915/gvt/sched_policy.c 		if (!list_empty(workload_q_head(vgpu, i)))
vgpu               55 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct intel_vgpu *vgpu;
vgpu               75 drivers/gpu/drm/i915/gvt/sched_policy.c static void vgpu_update_timeslice(struct intel_vgpu *vgpu, ktime_t cur_time)
vgpu               80 drivers/gpu/drm/i915/gvt/sched_policy.c 	if (!vgpu || vgpu == vgpu->gvt->idle_vgpu)
vgpu               83 drivers/gpu/drm/i915/gvt/sched_policy.c 	vgpu_data = vgpu->sched_data;
vgpu              179 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct intel_vgpu *vgpu = NULL;
vgpu              187 drivers/gpu/drm/i915/gvt/sched_policy.c 		if (!vgpu_has_pending_workload(vgpu_data->vgpu))
vgpu              192 drivers/gpu/drm/i915/gvt/sched_policy.c 				vgpu = vgpu_data->vgpu;
vgpu              200 drivers/gpu/drm/i915/gvt/sched_policy.c 			vgpu = vgpu_data->vgpu;
vgpu              205 drivers/gpu/drm/i915/gvt/sched_policy.c 	return vgpu;
vgpu              216 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct intel_vgpu *vgpu = NULL;
vgpu              222 drivers/gpu/drm/i915/gvt/sched_policy.c 	vgpu = find_busy_vgpu(sched_data);
vgpu              223 drivers/gpu/drm/i915/gvt/sched_policy.c 	if (vgpu) {
vgpu              224 drivers/gpu/drm/i915/gvt/sched_policy.c 		scheduler->next_vgpu = vgpu;
vgpu              225 drivers/gpu/drm/i915/gvt/sched_policy.c 		vgpu_data = vgpu->sched_data;
vgpu              311 drivers/gpu/drm/i915/gvt/sched_policy.c static int tbs_sched_init_vgpu(struct intel_vgpu *vgpu)
vgpu              319 drivers/gpu/drm/i915/gvt/sched_policy.c 	data->sched_ctl.weight = vgpu->sched_ctl.weight;
vgpu              320 drivers/gpu/drm/i915/gvt/sched_policy.c 	data->vgpu = vgpu;
vgpu              323 drivers/gpu/drm/i915/gvt/sched_policy.c 	vgpu->sched_data = data;
vgpu              328 drivers/gpu/drm/i915/gvt/sched_policy.c static void tbs_sched_clean_vgpu(struct intel_vgpu *vgpu)
vgpu              330 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              333 drivers/gpu/drm/i915/gvt/sched_policy.c 	kfree(vgpu->sched_data);
vgpu              334 drivers/gpu/drm/i915/gvt/sched_policy.c 	vgpu->sched_data = NULL;
vgpu              341 drivers/gpu/drm/i915/gvt/sched_policy.c static void tbs_sched_start_schedule(struct intel_vgpu *vgpu)
vgpu              343 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct gvt_sched_data *sched_data = vgpu->gvt->scheduler.sched_data;
vgpu              344 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct vgpu_sched_data *vgpu_data = vgpu->sched_data;
vgpu              363 drivers/gpu/drm/i915/gvt/sched_policy.c static void tbs_sched_stop_schedule(struct intel_vgpu *vgpu)
vgpu              365 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct vgpu_sched_data *vgpu_data = vgpu->sched_data;
vgpu              406 drivers/gpu/drm/i915/gvt/sched_policy.c int intel_vgpu_init_sched_policy(struct intel_vgpu *vgpu)
vgpu              410 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_lock(&vgpu->gvt->sched_lock);
vgpu              411 drivers/gpu/drm/i915/gvt/sched_policy.c 	ret = vgpu->gvt->scheduler.sched_ops->init_vgpu(vgpu);
vgpu              412 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_unlock(&vgpu->gvt->sched_lock);
vgpu              417 drivers/gpu/drm/i915/gvt/sched_policy.c void intel_vgpu_clean_sched_policy(struct intel_vgpu *vgpu)
vgpu              419 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_lock(&vgpu->gvt->sched_lock);
vgpu              420 drivers/gpu/drm/i915/gvt/sched_policy.c 	vgpu->gvt->scheduler.sched_ops->clean_vgpu(vgpu);
vgpu              421 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_unlock(&vgpu->gvt->sched_lock);
vgpu              424 drivers/gpu/drm/i915/gvt/sched_policy.c void intel_vgpu_start_schedule(struct intel_vgpu *vgpu)
vgpu              426 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct vgpu_sched_data *vgpu_data = vgpu->sched_data;
vgpu              428 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_lock(&vgpu->gvt->sched_lock);
vgpu              430 drivers/gpu/drm/i915/gvt/sched_policy.c 		gvt_dbg_core("vgpu%d: start schedule\n", vgpu->id);
vgpu              431 drivers/gpu/drm/i915/gvt/sched_policy.c 		vgpu->gvt->scheduler.sched_ops->start_schedule(vgpu);
vgpu              433 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_unlock(&vgpu->gvt->sched_lock);
vgpu              443 drivers/gpu/drm/i915/gvt/sched_policy.c void intel_vgpu_stop_schedule(struct intel_vgpu *vgpu)
vgpu              446 drivers/gpu/drm/i915/gvt/sched_policy.c 		&vgpu->gvt->scheduler;
vgpu              448 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct vgpu_sched_data *vgpu_data = vgpu->sched_data;
vgpu              449 drivers/gpu/drm/i915/gvt/sched_policy.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              454 drivers/gpu/drm/i915/gvt/sched_policy.c 	gvt_dbg_core("vgpu%d: stop schedule\n", vgpu->id);
vgpu              456 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_lock(&vgpu->gvt->sched_lock);
vgpu              457 drivers/gpu/drm/i915/gvt/sched_policy.c 	scheduler->sched_ops->stop_schedule(vgpu);
vgpu              459 drivers/gpu/drm/i915/gvt/sched_policy.c 	if (scheduler->next_vgpu == vgpu)
vgpu              462 drivers/gpu/drm/i915/gvt/sched_policy.c 	if (scheduler->current_vgpu == vgpu) {
vgpu              471 drivers/gpu/drm/i915/gvt/sched_policy.c 		if (scheduler->engine_owner[ring_id] == vgpu) {
vgpu              472 drivers/gpu/drm/i915/gvt/sched_policy.c 			intel_gvt_switch_mmio(vgpu, NULL, ring_id);
vgpu              478 drivers/gpu/drm/i915/gvt/sched_policy.c 	mutex_unlock(&vgpu->gvt->sched_lock);
vgpu               40 drivers/gpu/drm/i915/gvt/sched_policy.h 	int (*init_vgpu)(struct intel_vgpu *vgpu);
vgpu               41 drivers/gpu/drm/i915/gvt/sched_policy.h 	void (*clean_vgpu)(struct intel_vgpu *vgpu);
vgpu               42 drivers/gpu/drm/i915/gvt/sched_policy.h 	void (*start_schedule)(struct intel_vgpu *vgpu);
vgpu               43 drivers/gpu/drm/i915/gvt/sched_policy.h 	void (*stop_schedule)(struct intel_vgpu *vgpu);
vgpu               52 drivers/gpu/drm/i915/gvt/sched_policy.h int intel_vgpu_init_sched_policy(struct intel_vgpu *vgpu);
vgpu               54 drivers/gpu/drm/i915/gvt/sched_policy.h void intel_vgpu_clean_sched_policy(struct intel_vgpu *vgpu);
vgpu               56 drivers/gpu/drm/i915/gvt/sched_policy.h void intel_vgpu_start_schedule(struct intel_vgpu *vgpu);
vgpu               58 drivers/gpu/drm/i915/gvt/sched_policy.h void intel_vgpu_stop_schedule(struct intel_vgpu *vgpu);
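sched_policy.c implements a weight-based time-slicing policy: each vGPU carries a weight, burns its remaining timeslice while it owns the engines, and is skipped once the slice is spent until the next balancing period. A minimal sketch of that accounting; the period and field names are assumptions, not the driver's constants.

#include <stdint.h>

struct sched_entity {
	int weight;		/* relative share of the period */
	int64_t left_ts_ns;	/* remaining timeslice in this period */
};

/* Refill every entity in proportion to its weight over a fixed period. */
static void balance_period(struct sched_entity *e, int n, int total_weight,
			   int64_t period_ns)
{
	if (total_weight <= 0)
		return;
	for (int i = 0; i < n; i++)
		e[i].left_ts_ns = period_ns * e[i].weight / total_weight;
}

/* Charge the running entity for the time it actually held the hardware. */
static void charge(struct sched_entity *e, int64_t ran_ns)
{
	e->left_ts_ns -= ran_ns;
}

static int has_budget(const struct sched_entity *e)
{
	return e->left_ts_ns > 0;	/* exhausted entities are skipped */
}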
vgpu               86 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = workload->vgpu->gvt->dev_priv;
vgpu              128 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              129 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              144 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu, workload->ring_context_gpa \
vgpu              147 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_gvt_hypervisor_read_gpa(vgpu, workload->ring_context_gpa \
vgpu              164 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu,
vgpu              189 drivers/gpu/drm/i915/gvt/scheduler.c 		context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
vgpu              199 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_gvt_hypervisor_read_gpa(vgpu, context_gpa, dst,
vgpu              212 drivers/gpu/drm/i915/gvt/scheduler.c static void save_ring_hw_state(struct intel_vgpu *vgpu, int ring_id)
vgpu              214 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              219 drivers/gpu/drm/i915/gvt/scheduler.c 	vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) = I915_READ_FW(reg);
vgpu              221 drivers/gpu/drm/i915/gvt/scheduler.c 	vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) = I915_READ_FW(reg);
vgpu              223 drivers/gpu/drm/i915/gvt/scheduler.c 	vgpu_vreg(vgpu, i915_mmio_reg_offset(reg)) = I915_READ_FW(reg);
vgpu              258 drivers/gpu/drm/i915/gvt/scheduler.c 		if (workload->vgpu != scheduler->engine_owner[ring_id]) {
vgpu              261 drivers/gpu/drm/i915/gvt/scheduler.c 					      workload->vgpu, ring_id);
vgpu              262 drivers/gpu/drm/i915/gvt/scheduler.c 			scheduler->engine_owner[ring_id] = workload->vgpu;
vgpu              265 drivers/gpu/drm/i915/gvt/scheduler.c 				      ring_id, workload->vgpu->id);
vgpu              270 drivers/gpu/drm/i915/gvt/scheduler.c 		save_ring_hw_state(workload->vgpu, ring_id);
vgpu              274 drivers/gpu/drm/i915/gvt/scheduler.c 		save_ring_hw_state(workload->vgpu, ring_id);
vgpu              303 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              310 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_vgpu_restore_inhibit_context(vgpu, req);
vgpu              390 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              391 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              392 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              420 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              421 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              422 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              455 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_gvt *gvt = workload->vgpu->gvt;
vgpu              577 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              578 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              582 drivers/gpu/drm/i915/gvt/scheduler.c 	vgpu_vreg_t(vgpu, RING_START(ring_base)) = workload->rb_start;
vgpu              587 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              588 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              622 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              623 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              643 drivers/gpu/drm/i915/gvt/scheduler.c 	ret = intel_vgpu_sync_oos_pages(workload->vgpu);
vgpu              649 drivers/gpu/drm/i915/gvt/scheduler.c 	ret = intel_vgpu_flush_post_shadow(workload->vgpu);
vgpu              691 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              692 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              700 drivers/gpu/drm/i915/gvt/scheduler.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              737 drivers/gpu/drm/i915/gvt/scheduler.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              792 drivers/gpu/drm/i915/gvt/scheduler.c 	atomic_inc(&workload->vgpu->submission.running_workload_num);
vgpu              801 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              802 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              831 drivers/gpu/drm/i915/gvt/scheduler.c 	vgpu_vreg_t(vgpu, RING_TAIL(ring_base)) = tail;
vgpu              832 drivers/gpu/drm/i915/gvt/scheduler.c 	vgpu_vreg_t(vgpu, RING_HEAD(ring_base)) = head;
vgpu              843 drivers/gpu/drm/i915/gvt/scheduler.c 		context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
vgpu              853 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_gvt_hypervisor_write_gpa(vgpu, context_gpa, src,
vgpu              859 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_write_gpa(vgpu, workload->ring_context_gpa +
vgpu              866 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_write_gpa(vgpu, workload->ring_context_gpa + \
vgpu              874 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_write_gpa(vgpu,
vgpu              884 drivers/gpu/drm/i915/gvt/scheduler.c void intel_vgpu_clean_workloads(struct intel_vgpu *vgpu,
vgpu              887 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              888 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu              909 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu              910 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu              914 drivers/gpu/drm/i915/gvt/scheduler.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              938 drivers/gpu/drm/i915/gvt/scheduler.c 		    !(vgpu->resetting_eng & BIT(ring_id))) {
vgpu              943 drivers/gpu/drm/i915/gvt/scheduler.c 				intel_vgpu_trigger_virtual_event(vgpu, event);
vgpu              956 drivers/gpu/drm/i915/gvt/scheduler.c 	if (workload->status || vgpu->resetting_eng & BIT(ring_id)) {
vgpu              970 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_vgpu_clean_workloads(vgpu, BIT(ring_id));
vgpu              982 drivers/gpu/drm/i915/gvt/scheduler.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              997 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = NULL;
vgpu             1023 drivers/gpu/drm/i915/gvt/scheduler.c 				workload->vgpu->id);
vgpu             1044 drivers/gpu/drm/i915/gvt/scheduler.c 			vgpu = workload->vgpu;
vgpu             1065 drivers/gpu/drm/i915/gvt/scheduler.c 			enter_failsafe_mode(vgpu, GVT_FAILSAFE_GUEST_ERR);
vgpu             1070 drivers/gpu/drm/i915/gvt/scheduler.c void intel_gvt_wait_vgpu_idle(struct intel_vgpu *vgpu)
vgpu             1072 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1073 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu             1170 drivers/gpu/drm/i915/gvt/scheduler.c void intel_vgpu_clean_submission(struct intel_vgpu *vgpu)
vgpu             1172 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1176 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_vgpu_select_submission_ops(vgpu, ALL_ENGINES, 0);
vgpu             1179 drivers/gpu/drm/i915/gvt/scheduler.c 	for_each_engine(engine, vgpu->gvt->dev_priv, id)
vgpu             1194 drivers/gpu/drm/i915/gvt/scheduler.c void intel_vgpu_reset_submission(struct intel_vgpu *vgpu,
vgpu             1197 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1202 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_vgpu_clean_workloads(vgpu, engine_mask);
vgpu             1203 drivers/gpu/drm/i915/gvt/scheduler.c 	s->ops->reset(vgpu, engine_mask);
vgpu             1234 drivers/gpu/drm/i915/gvt/scheduler.c int intel_vgpu_setup_submission(struct intel_vgpu *vgpu)
vgpu             1236 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *i915 = vgpu->gvt->dev_priv;
vgpu             1237 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1329 drivers/gpu/drm/i915/gvt/scheduler.c int intel_vgpu_select_submission_ops(struct intel_vgpu *vgpu,
vgpu             1333 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1347 drivers/gpu/drm/i915/gvt/scheduler.c 		s->ops->clean(vgpu, engine_mask);
vgpu             1353 drivers/gpu/drm/i915/gvt/scheduler.c 		gvt_dbg_core("vgpu%d: remove submission ops\n", vgpu->id);
vgpu             1357 drivers/gpu/drm/i915/gvt/scheduler.c 	ret = ops[interface]->init(vgpu, engine_mask);
vgpu             1366 drivers/gpu/drm/i915/gvt/scheduler.c 			vgpu->id, s->ops->name);
vgpu             1380 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &workload->vgpu->submission;
vgpu             1392 drivers/gpu/drm/i915/gvt/scheduler.c alloc_workload(struct intel_vgpu *vgpu)
vgpu             1394 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1408 drivers/gpu/drm/i915/gvt/scheduler.c 	workload->vgpu = vgpu;
vgpu             1416 drivers/gpu/drm/i915/gvt/scheduler.c static void read_guest_pdps(struct intel_vgpu *vgpu,
vgpu             1425 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_gvt_hypervisor_read_gpa(vgpu,
vgpu             1433 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu *vgpu = workload->vgpu;
vgpu             1449 drivers/gpu/drm/i915/gvt/scheduler.c 	read_guest_pdps(workload->vgpu, workload->ring_context_gpa, (void *)pdps);
vgpu             1451 drivers/gpu/drm/i915/gvt/scheduler.c 	mm = intel_vgpu_get_ppgtt_mm(workload->vgpu, root_entry_type, pdps);
vgpu             1476 drivers/gpu/drm/i915/gvt/scheduler.c intel_vgpu_create_workload(struct intel_vgpu *vgpu, int ring_id,
vgpu             1479 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_submission *s = &vgpu->submission;
vgpu             1480 drivers/gpu/drm/i915/gvt/scheduler.c 	struct list_head *q = workload_q_head(vgpu, ring_id);
vgpu             1483 drivers/gpu/drm/i915/gvt/scheduler.c 	struct drm_i915_private *dev_priv = vgpu->gvt->dev_priv;
vgpu             1489 drivers/gpu/drm/i915/gvt/scheduler.c 	ring_context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm,
vgpu             1496 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1499 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1526 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1528 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1530 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1533 drivers/gpu/drm/i915/gvt/scheduler.c 	if (!intel_gvt_ggtt_validate_range(vgpu, start,
vgpu             1539 drivers/gpu/drm/i915/gvt/scheduler.c 	workload = alloc_workload(vgpu);
vgpu             1553 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1555 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_gvt_hypervisor_read_gpa(vgpu, ring_context_gpa +
vgpu             1565 drivers/gpu/drm/i915/gvt/scheduler.c 			if (!intel_gvt_ggtt_validate_range(vgpu,
vgpu             1579 drivers/gpu/drm/i915/gvt/scheduler.c 			if (!intel_gvt_ggtt_validate_range(vgpu,
vgpu             1602 drivers/gpu/drm/i915/gvt/scheduler.c 	if (list_empty(workload_q_head(vgpu, ring_id))) {
vgpu             1612 drivers/gpu/drm/i915/gvt/scheduler.c 			enter_failsafe_mode(vgpu, GVT_FAILSAFE_GUEST_ERR);
vgpu             1627 drivers/gpu/drm/i915/gvt/scheduler.c 		workload_q_head(workload->vgpu, workload->ring_id));
vgpu             1628 drivers/gpu/drm/i915/gvt/scheduler.c 	intel_gvt_kick_schedule(workload->vgpu->gvt);
vgpu             1629 drivers/gpu/drm/i915/gvt/scheduler.c 	wake_up(&workload->vgpu->gvt->scheduler.waitq[workload->ring_id]);
vgpu               81 drivers/gpu/drm/i915/gvt/scheduler.h 	struct intel_vgpu *vgpu;
vgpu              132 drivers/gpu/drm/i915/gvt/scheduler.h #define workload_q_head(vgpu, ring_id) \
vgpu              133 drivers/gpu/drm/i915/gvt/scheduler.h 	(&(vgpu->submission.workload_q_head[ring_id]))
vgpu              141 drivers/gpu/drm/i915/gvt/scheduler.h void intel_gvt_wait_vgpu_idle(struct intel_vgpu *vgpu);
vgpu              143 drivers/gpu/drm/i915/gvt/scheduler.h int intel_vgpu_setup_submission(struct intel_vgpu *vgpu);
vgpu              145 drivers/gpu/drm/i915/gvt/scheduler.h void intel_vgpu_reset_submission(struct intel_vgpu *vgpu,
vgpu              148 drivers/gpu/drm/i915/gvt/scheduler.h void intel_vgpu_clean_submission(struct intel_vgpu *vgpu);
vgpu              150 drivers/gpu/drm/i915/gvt/scheduler.h int intel_vgpu_select_submission_ops(struct intel_vgpu *vgpu,
vgpu              158 drivers/gpu/drm/i915/gvt/scheduler.h intel_vgpu_create_workload(struct intel_vgpu *vgpu, int ring_id,
vgpu              163 drivers/gpu/drm/i915/gvt/scheduler.h void intel_vgpu_clean_workloads(struct intel_vgpu *vgpu,
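scheduler.c queues workloads on a per-ring list and wakes a per-ring worker (wake_up on scheduler.waitq[ring_id]) to shadow and dispatch them. A producer-side sketch of that pattern, with pthread primitives standing in for the kernel waitqueue and a counter standing in for the workload list:

#include <pthread.h>

struct ring_queue {
	pthread_mutex_t lock;
	pthread_cond_t waitq;
	int pending;		/* stand-in for the workload_q_head list */
};

static struct ring_queue rcs_queue = {
	.lock  = PTHREAD_MUTEX_INITIALIZER,
	.waitq = PTHREAD_COND_INITIALIZER,
};

static void queue_workload(struct ring_queue *q)
{
	pthread_mutex_lock(&q->lock);
	q->pending++;			/* list_add_tail() in the real driver */
	pthread_cond_signal(&q->waitq);	/* wake the per-ring scheduler thread */
	pthread_mutex_unlock(&q->lock);
}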
vgpu               38 drivers/gpu/drm/i915/gvt/vgpu.c void populate_pvinfo_page(struct intel_vgpu *vgpu)
vgpu               41 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg64_t(vgpu, vgtif_reg(magic)) = VGT_MAGIC;
vgpu               42 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(version_major)) = 1;
vgpu               43 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(version_minor)) = 0;
vgpu               44 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(display_ready)) = 0;
vgpu               45 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(vgt_id)) = vgpu->id;
vgpu               47 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) = VGT_CAPS_FULL_PPGTT;
vgpu               48 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) |= VGT_CAPS_HWSP_EMULATION;
vgpu               49 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) |= VGT_CAPS_HUGE_GTT;
vgpu               51 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(avail_rs.mappable_gmadr.base)) =
vgpu               52 drivers/gpu/drm/i915/gvt/vgpu.c 		vgpu_aperture_gmadr_base(vgpu);
vgpu               53 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(avail_rs.mappable_gmadr.size)) =
vgpu               54 drivers/gpu/drm/i915/gvt/vgpu.c 		vgpu_aperture_sz(vgpu);
vgpu               55 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(avail_rs.nonmappable_gmadr.base)) =
vgpu               56 drivers/gpu/drm/i915/gvt/vgpu.c 		vgpu_hidden_gmadr_base(vgpu);
vgpu               57 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(avail_rs.nonmappable_gmadr.size)) =
vgpu               58 drivers/gpu/drm/i915/gvt/vgpu.c 		vgpu_hidden_sz(vgpu);
vgpu               60 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(avail_rs.fence_num)) = vgpu_fence_sz(vgpu);
vgpu               62 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(cursor_x_hot)) = UINT_MAX;
vgpu               63 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu_vreg_t(vgpu, vgtif_reg(cursor_y_hot)) = UINT_MAX;
vgpu               65 drivers/gpu/drm/i915/gvt/vgpu.c 	gvt_dbg_core("Populate PVINFO PAGE for vGPU %d\n", vgpu->id);
vgpu               67 drivers/gpu/drm/i915/gvt/vgpu.c 		vgpu_aperture_gmadr_base(vgpu), vgpu_aperture_sz(vgpu));
vgpu               69 drivers/gpu/drm/i915/gvt/vgpu.c 		vgpu_hidden_gmadr_base(vgpu), vgpu_hidden_sz(vgpu));
vgpu               70 drivers/gpu/drm/i915/gvt/vgpu.c 	gvt_dbg_core("fence size %d\n", vgpu_fence_sz(vgpu));
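populate_pvinfo_page() fills the shared PVINFO page that the paravirtualized guest driver reads: a magic value, capability bits, the mappable and non-mappable graphics memory ranges, and the fence count granted to this vGPU. A simplified stand-in layout and fill routine; the struct layout and magic value below are illustrative, not the real vgtif layout.

#include <stdint.h>
#include <string.h>

struct fake_pvinfo {
	uint64_t magic;
	uint32_t caps;
	uint64_t mappable_base, mappable_size;
	uint64_t nonmappable_base, nonmappable_size;
	uint32_t fence_num;
};

static void fill_pvinfo(struct fake_pvinfo *p, uint64_t lo_base, uint64_t lo_sz,
			uint64_t hi_base, uint64_t hi_sz, uint32_t fences)
{
	memset(p, 0, sizeof(*p));
	p->magic = 0x56475055ull;	/* placeholder magic, not VGT_MAGIC */
	p->caps = 0;			/* capability bits would be or'ed in here */
	p->mappable_base = lo_base;	/* low/aperture range */
	p->mappable_size = lo_sz;
	p->nonmappable_base = hi_base;	/* high/hidden range */
	p->nonmappable_size = hi_sz;
	p->fence_num = fences;
}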
vgpu              213 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_activate_vgpu(struct intel_vgpu *vgpu)
vgpu              215 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_lock(&vgpu->gvt->lock);
vgpu              216 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->active = true;
vgpu              217 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_unlock(&vgpu->gvt->lock);
vgpu              228 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_deactivate_vgpu(struct intel_vgpu *vgpu)
vgpu              230 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              232 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->active = false;
vgpu              234 drivers/gpu/drm/i915/gvt/vgpu.c 	if (atomic_read(&vgpu->submission.running_workload_num)) {
vgpu              235 drivers/gpu/drm/i915/gvt/vgpu.c 		mutex_unlock(&vgpu->vgpu_lock);
vgpu              236 drivers/gpu/drm/i915/gvt/vgpu.c 		intel_gvt_wait_vgpu_idle(vgpu);
vgpu              237 drivers/gpu/drm/i915/gvt/vgpu.c 		mutex_lock(&vgpu->vgpu_lock);
vgpu              240 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_stop_schedule(vgpu);
vgpu              242 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              254 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_release_vgpu(struct intel_vgpu *vgpu)
vgpu              256 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_gvt_deactivate_vgpu(vgpu);
vgpu              258 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              259 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_workloads(vgpu, ALL_ENGINES);
vgpu              260 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_dmabuf_cleanup(vgpu);
vgpu              261 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              271 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_destroy_vgpu(struct intel_vgpu *vgpu)
vgpu              273 drivers/gpu/drm/i915/gvt/vgpu.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              275 drivers/gpu/drm/i915/gvt/vgpu.c 	WARN(vgpu->active, "vGPU is still active!\n");
vgpu              282 drivers/gpu/drm/i915/gvt/vgpu.c 	idr_remove(&gvt->vgpu_idr, vgpu->id);
vgpu              285 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              286 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_gvt_debugfs_remove_vgpu(vgpu);
vgpu              287 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_sched_policy(vgpu);
vgpu              288 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_submission(vgpu);
vgpu              289 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_display(vgpu);
vgpu              290 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_opregion(vgpu);
vgpu              291 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_reset_ggtt(vgpu, true);
vgpu              292 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_gtt(vgpu);
vgpu              293 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_gvt_hypervisor_detach_vgpu(vgpu);
vgpu              294 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_free_resource(vgpu);
vgpu              295 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_mmio(vgpu);
vgpu              296 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_dmabuf_cleanup(vgpu);
vgpu              297 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              305 drivers/gpu/drm/i915/gvt/vgpu.c 	vfree(vgpu);
vgpu              321 drivers/gpu/drm/i915/gvt/vgpu.c 	struct intel_vgpu *vgpu;
vgpu              325 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu = vzalloc(sizeof(*vgpu));
vgpu              326 drivers/gpu/drm/i915/gvt/vgpu.c 	if (!vgpu)
vgpu              329 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->id = IDLE_VGPU_IDR;
vgpu              330 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->gvt = gvt;
vgpu              331 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_init(&vgpu->vgpu_lock);
vgpu              334 drivers/gpu/drm/i915/gvt/vgpu.c 		INIT_LIST_HEAD(&vgpu->submission.workload_q_head[i]);
vgpu              336 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_init_sched_policy(vgpu);
vgpu              340 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->active = false;
vgpu              342 drivers/gpu/drm/i915/gvt/vgpu.c 	return vgpu;
vgpu              345 drivers/gpu/drm/i915/gvt/vgpu.c 	vfree(vgpu);
vgpu              356 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_destroy_idle_vgpu(struct intel_vgpu *vgpu)
vgpu              358 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              359 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_sched_policy(vgpu);
vgpu              360 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu              362 drivers/gpu/drm/i915/gvt/vgpu.c 	vfree(vgpu);
vgpu              368 drivers/gpu/drm/i915/gvt/vgpu.c 	struct intel_vgpu *vgpu;
vgpu              375 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu = vzalloc(sizeof(*vgpu));
vgpu              376 drivers/gpu/drm/i915/gvt/vgpu.c 	if (!vgpu)
vgpu              379 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = idr_alloc(&gvt->vgpu_idr, vgpu, IDLE_VGPU_IDR + 1, GVT_MAX_VGPU,
vgpu              384 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->id = ret;
vgpu              385 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->handle = param->handle;
vgpu              386 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->gvt = gvt;
vgpu              387 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->sched_ctl.weight = param->weight;
vgpu              388 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_init(&vgpu->vgpu_lock);
vgpu              389 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_init(&vgpu->dmabuf_lock);
vgpu              390 drivers/gpu/drm/i915/gvt/vgpu.c 	INIT_LIST_HEAD(&vgpu->dmabuf_obj_list_head);
vgpu              391 drivers/gpu/drm/i915/gvt/vgpu.c 	INIT_RADIX_TREE(&vgpu->page_track_tree, GFP_KERNEL);
vgpu              392 drivers/gpu/drm/i915/gvt/vgpu.c 	idr_init(&vgpu->object_idr);
vgpu              393 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_init_cfg_space(vgpu, param->primary);
vgpu              395 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_init_mmio(vgpu);
vgpu              399 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_alloc_resource(vgpu, param);
vgpu              403 drivers/gpu/drm/i915/gvt/vgpu.c 	populate_pvinfo_page(vgpu);
vgpu              405 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_gvt_hypervisor_attach_vgpu(vgpu);
vgpu              409 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_init_gtt(vgpu);
vgpu              413 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_init_opregion(vgpu);
vgpu              417 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_init_display(vgpu, param->resolution);
vgpu              421 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_setup_submission(vgpu);
vgpu              425 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_vgpu_init_sched_policy(vgpu);
vgpu              429 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_gvt_debugfs_add_vgpu(vgpu);
vgpu              431 drivers/gpu/drm/i915/gvt/vgpu.c 	ret = intel_gvt_hypervisor_set_opregion(vgpu);
vgpu              437 drivers/gpu/drm/i915/gvt/vgpu.c 		ret = intel_gvt_hypervisor_set_edid(vgpu, PORT_D);
vgpu              441 drivers/gpu/drm/i915/gvt/vgpu.c 	return vgpu;
vgpu              444 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_sched_policy(vgpu);
vgpu              446 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_submission(vgpu);
vgpu              448 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_display(vgpu);
vgpu              450 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_opregion(vgpu);
vgpu              452 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_gtt(vgpu);
vgpu              454 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_gvt_hypervisor_detach_vgpu(vgpu);
vgpu              456 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_free_resource(vgpu);
vgpu              458 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_clean_mmio(vgpu);
vgpu              460 drivers/gpu/drm/i915/gvt/vgpu.c 	idr_remove(&gvt->vgpu_idr, vgpu->id);
vgpu              462 drivers/gpu/drm/i915/gvt/vgpu.c 	vfree(vgpu);
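
The __intel_gvt_create_vgpu() lines above follow the classic goto-unwind shape: each init step (MMIO, resource, GTT, opregion, display, submission, scheduler, ...) checks its return value, and on failure jumps to a label that undoes only the steps that already succeeded, in reverse order, ending with the IDR removal and vfree. Below is a small self-contained sketch of that pattern with made-up component names; it illustrates the control flow, not the actual GVT init calls.

	#include <errno.h>
	#include <stdlib.h>

	struct toy_vgpu { void *mmio, *gtt, *display; };

	static int toy_create(struct toy_vgpu **out)
	{
		struct toy_vgpu *v = calloc(1, sizeof(*v));
		int ret = -ENOMEM;

		if (!v)
			return ret;
		if (!(v->mmio = malloc(64)))		/* "init_mmio" stand-in */
			goto out_free_vgpu;
		if (!(v->gtt = malloc(64)))		/* "init_gtt" stand-in */
			goto out_clean_mmio;
		if (!(v->display = malloc(64)))		/* "init_display" stand-in */
			goto out_clean_gtt;

		*out = v;
		return 0;

	out_clean_gtt:				/* unwind in reverse order */
		free(v->gtt);
	out_clean_mmio:
		free(v->mmio);
	out_free_vgpu:
		free(v);
		return ret;
	}
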
vgpu              480 drivers/gpu/drm/i915/gvt/vgpu.c 	struct intel_vgpu *vgpu;
vgpu              495 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu = __intel_gvt_create_vgpu(gvt, &param);
vgpu              496 drivers/gpu/drm/i915/gvt/vgpu.c 	if (!IS_ERR(vgpu))
vgpu              501 drivers/gpu/drm/i915/gvt/vgpu.c 	return vgpu;
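
The intel_gvt_create_vgpu() wrapper checks the result of __intel_gvt_create_vgpu() with IS_ERR() before handing it back, i.e. the create path returns either a valid pointer or an errno encoded into the pointer value. The following is a toy userspace re-implementation of that convention for illustration only; it mimics the idea behind the kernel's <linux/err.h> helpers but is not that header.

	#include <errno.h>
	#include <stdint.h>
	#include <stdio.h>
	#include <stdlib.h>

	#define TOY_MAX_ERRNO 4095	/* top page of the address range encodes errors */

	static inline void *toy_err_ptr(long err) { return (void *)(intptr_t)err; }
	static inline long  toy_ptr_err(void *p)  { return (long)(intptr_t)p; }
	static inline int   toy_is_err(void *p)
	{
		return (uintptr_t)p >= (uintptr_t)-TOY_MAX_ERRNO;
	}

	static void *toy_create(int fail)
	{
		if (fail)
			return toy_err_ptr(-ENOMEM);	/* error folded into the pointer */
		return malloc(16);			/* otherwise a real object */
	}

	int main(void)
	{
		void *obj = toy_create(1);

		if (toy_is_err(obj))
			printf("create failed: %ld\n", toy_ptr_err(obj));
		else
			free(obj);
		return 0;
	}
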
vgpu              532 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_reset_vgpu_locked(struct intel_vgpu *vgpu, bool dmlr,
vgpu              535 drivers/gpu/drm/i915/gvt/vgpu.c 	struct intel_gvt *gvt = vgpu->gvt;
vgpu              541 drivers/gpu/drm/i915/gvt/vgpu.c 		     vgpu->id, dmlr, engine_mask);
vgpu              543 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->resetting_eng = resetting_eng;
vgpu              545 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_stop_schedule(vgpu);
vgpu              551 drivers/gpu/drm/i915/gvt/vgpu.c 		mutex_unlock(&vgpu->vgpu_lock);
vgpu              552 drivers/gpu/drm/i915/gvt/vgpu.c 		intel_gvt_wait_vgpu_idle(vgpu);
vgpu              553 drivers/gpu/drm/i915/gvt/vgpu.c 		mutex_lock(&vgpu->vgpu_lock);
vgpu              556 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_vgpu_reset_submission(vgpu, resetting_eng);
vgpu              559 drivers/gpu/drm/i915/gvt/vgpu.c 		intel_vgpu_select_submission_ops(vgpu, ALL_ENGINES, 0);
vgpu              560 drivers/gpu/drm/i915/gvt/vgpu.c 		intel_vgpu_invalidate_ppgtt(vgpu);
vgpu              563 drivers/gpu/drm/i915/gvt/vgpu.c 			intel_vgpu_reset_gtt(vgpu);
vgpu              564 drivers/gpu/drm/i915/gvt/vgpu.c 			intel_vgpu_reset_resource(vgpu);
vgpu              567 drivers/gpu/drm/i915/gvt/vgpu.c 		intel_vgpu_reset_mmio(vgpu, dmlr);
vgpu              568 drivers/gpu/drm/i915/gvt/vgpu.c 		populate_pvinfo_page(vgpu);
vgpu              571 drivers/gpu/drm/i915/gvt/vgpu.c 			intel_vgpu_reset_display(vgpu);
vgpu              572 drivers/gpu/drm/i915/gvt/vgpu.c 			intel_vgpu_reset_cfg_space(vgpu);
vgpu              574 drivers/gpu/drm/i915/gvt/vgpu.c 			vgpu->failsafe = false;
vgpu              575 drivers/gpu/drm/i915/gvt/vgpu.c 			vgpu->pv_notified = false;
vgpu              579 drivers/gpu/drm/i915/gvt/vgpu.c 	vgpu->resetting_eng = 0;
vgpu              580 drivers/gpu/drm/i915/gvt/vgpu.c 	gvt_dbg_core("reset vgpu%d done\n", vgpu->id);
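
Two things stand out in the intel_gvt_reset_vgpu_locked() lines: the per-vGPU lock is dropped around the wait for the instance to go idle and retaken before any state is rewritten, and a device-model-level reset (dmlr) additionally resets GTT, resources, MMIO, display and config space and clears the failsafe/pv_notified flags, while a per-engine reset only touches submission state. The sketch below models just the unlock/wait/relock step; struct and function names are invented, and the stub wait stands in for a potentially blocking call.

	#include <pthread.h>

	struct toy_dev {
		pthread_mutex_t lock;
		unsigned int resetting_eng;
	};

	static void toy_wait_idle(struct toy_dev *d)
	{
		/* stand-in for a wait-until-idle call; may block for a while */
		(void)d;
	}

	/* caller already holds d->lock, mirroring the *_locked naming above */
	static void toy_reset_locked(struct toy_dev *d, unsigned int engine_mask)
	{
		d->resetting_eng = engine_mask;

		pthread_mutex_unlock(&d->lock);	/* don't hold the lock across the wait */
		toy_wait_idle(d);
		pthread_mutex_lock(&d->lock);	/* retake before rewriting device state */

		/* ... device-model or per-engine reset work would happen here ... */
		d->resetting_eng = 0;
	}
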
vgpu              591 drivers/gpu/drm/i915/gvt/vgpu.c void intel_gvt_reset_vgpu(struct intel_vgpu *vgpu)
vgpu              593 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_lock(&vgpu->vgpu_lock);
vgpu              594 drivers/gpu/drm/i915/gvt/vgpu.c 	intel_gvt_reset_vgpu_locked(vgpu, true, 0);
vgpu              595 drivers/gpu/drm/i915/gvt/vgpu.c 	mutex_unlock(&vgpu->vgpu_lock);
vgpu             1316 drivers/gpu/drm/i915/i915_drv.h 	struct i915_virtual_gpu vgpu;
vgpu             2235 drivers/gpu/drm/i915/i915_drv.h 	return dev_priv->vgpu.active;
vgpu              841 drivers/gpu/drm/i915/i915_gem_gtt.c 	mutex_lock(&dev_priv->vgpu.lock);
vgpu              866 drivers/gpu/drm/i915/i915_gem_gtt.c 	mutex_unlock(&dev_priv->vgpu.lock);
vgpu               94 drivers/gpu/drm/i915/i915_vgpu.c 	dev_priv->vgpu.caps = readl(shared_area + vgtif_offset(vgt_caps));
vgpu               96 drivers/gpu/drm/i915/i915_vgpu.c 	dev_priv->vgpu.active = true;
vgpu               97 drivers/gpu/drm/i915/i915_vgpu.c 	mutex_init(&dev_priv->vgpu.lock);
vgpu              106 drivers/gpu/drm/i915/i915_vgpu.c 	return dev_priv->vgpu.caps & VGT_CAPS_FULL_PPGTT;
vgpu               37 drivers/gpu/drm/i915/i915_vgpu.h 	return dev_priv->vgpu.caps & VGT_CAPS_HWSP_EMULATION;
vgpu               43 drivers/gpu/drm/i915/i915_vgpu.h 	return dev_priv->vgpu.caps & VGT_CAPS_HUGE_GTT;
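
The i915_vgpu.c/i915_vgpu.h lines show the guest-side view: at probe the driver reads a caps word from the shared vgt_if area, marks the virtual GPU active, and later answers feature queries (full PPGTT, HWSP emulation, huge GTT) with simple bitmask tests on that cached word. A toy model of that shape follows; the bit values and names are invented for illustration and do not correspond to the real VGT_CAPS_* definitions.

	#include <stdbool.h>
	#include <stdint.h>

	#define TOY_CAP_FULL_PPGTT	(1u << 2)	/* invented bit assignments */
	#define TOY_CAP_HWSP_EMULATION	(1u << 3)
	#define TOY_CAP_HUGE_GTT	(1u << 4)

	struct toy_vgpu_state {
		bool	 active;	/* set once the shared page is recognized */
		uint32_t caps;		/* copied from the shared area at probe   */
	};

	static bool toy_vgpu_active(const struct toy_vgpu_state *s)
	{
		return s->active;
	}

	static bool toy_has_full_ppgtt(const struct toy_vgpu_state *s)
	{
		return (s->caps & TOY_CAP_FULL_PPGTT) != 0;
	}

	static bool toy_has_huge_gtt(const struct toy_vgpu_state *s)
	{
		return (s->caps & TOY_CAP_HUGE_GTT) != 0;
	}
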