vcn               938 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_vcn		vcn;
vcn               157 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn               158 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 				if (adev->vcn.harvest_config & (1 << j))
vcn               160 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 				rings[num_rings++] = &adev->vcn.inst[j].ring_dec;
vcn               164 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn               165 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 				if (adev->vcn.harvest_config & (1 << j))
vcn               167 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 				for (k = 0; k < adev->vcn.num_enc_rings; ++k)
vcn               168 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 					rings[num_rings++] = &adev->vcn.inst[j].ring_enc[k];
vcn               172 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn               173 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 				if (adev->vcn.harvest_config & (1 << j))
vcn               175 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 				rings[num_rings++] = &adev->vcn.inst[j].ring_jpeg;
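
The amdgpu_ctx.c hits above all follow one pattern: walk every VCN instance, skip any instance fused off in the harvest_config bitmask, and collect that instance's decode, encode and JPEG rings. A minimal userspace sketch of the pattern follows; the mock_* types are stand-ins for the real structures in amdgpu.h and amdgpu_vcn.h, not the driver's definitions.

    #include <stdio.h>

    /* Mock types; the real ones live in amdgpu.h and amdgpu_vcn.h. */
    #define MOCK_MAX_INST 2
    #define MOCK_MAX_ENC  2

    struct mock_ring { int id; };
    struct mock_inst {
        struct mock_ring ring_dec, ring_enc[MOCK_MAX_ENC], ring_jpeg;
    };
    struct mock_vcn {
        unsigned int num_vcn_inst, num_enc_rings, harvest_config;
        struct mock_inst inst[MOCK_MAX_INST];
    };

    int main(void)
    {
        struct mock_vcn vcn = {
            .num_vcn_inst  = 2,
            .num_enc_rings = 2,
            .harvest_config = 1 << 1,   /* instance 1 fused off */
        };
        struct mock_ring *rings[16];
        unsigned int j, k, num_rings = 0;

        for (j = 0; j < vcn.num_vcn_inst; ++j) {
            if (vcn.harvest_config & (1 << j))
                continue;               /* harvested: expose no rings */
            rings[num_rings++] = &vcn.inst[j].ring_dec;
            for (k = 0; k < vcn.num_enc_rings; ++k)
                rings[num_rings++] = &vcn.inst[j].ring_enc[k];
            rings[num_rings++] = &vcn.inst[j].ring_jpeg;
        }
        printf("exposed %u of 8 possible rings\n", num_rings);
        return 0;
    }
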
vcn                63 drivers/gpu/drm/amd/amdgpu/amdgpu_doorbell.h 		} vcn;
vcn               218 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		fw_info->ver = adev->vcn.fw_version;
vcn               375 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		for (i = 0; i < adev->vcn.num_vcn_inst; i++) {
vcn               379 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 			if (adev->vcn.inst[i].ring_dec.sched.ready)
vcn               387 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		for (i = 0; i < adev->vcn.num_vcn_inst; i++) {
vcn               391 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 			for (j = 0; j < adev->vcn.num_enc_rings; j++)
vcn               392 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 				if (adev->vcn.inst[i].ring_enc[j].sched.ready)
vcn               400 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		for (i = 0; i < adev->vcn.num_vcn_inst; i++) {
vcn               404 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 			if (adev->vcn.inst[i].ring_jpeg.sched.ready)
vcn               412 drivers/gpu/drm/amd/amdgpu/amdgpu_ucode.c FW_VERSION_ATTR(vcn_fw_version, 0444, vcn.fw_version);
vcn                74 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	INIT_DELAYED_WORK(&adev->vcn.idle_work, amdgpu_vcn_idle_work_handler);
vcn                92 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			adev->vcn.indirect_sram = true;
vcn                98 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			adev->vcn.indirect_sram = true;
vcn               104 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			adev->vcn.indirect_sram = true;
vcn               110 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			adev->vcn.indirect_sram = true;
vcn               116 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	r = request_firmware(&adev->vcn.fw, fw_name, adev->dev);
vcn               123 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	r = amdgpu_ucode_validate(adev->vcn.fw);
vcn               127 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		release_firmware(adev->vcn.fw);
vcn               128 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		adev->vcn.fw = NULL;
vcn               132 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	hdr = (const struct common_firmware_header *)adev->vcn.fw->data;
vcn               133 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	adev->vcn.fw_version = le32_to_cpu(hdr->ucode_version);
vcn               166 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (i = 0; i < adev->vcn.num_vcn_inst; i++) {
vcn               167 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               171 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 						AMDGPU_GEM_DOMAIN_VRAM, &adev->vcn.inst[i].vcpu_bo,
vcn               172 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 						&adev->vcn.inst[i].gpu_addr, &adev->vcn.inst[i].cpu_addr);
vcn               179 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	if (adev->vcn.indirect_sram) {
vcn               181 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			    AMDGPU_GEM_DOMAIN_VRAM, &adev->vcn.dpg_sram_bo,
vcn               182 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			    &adev->vcn.dpg_sram_gpu_addr, &adev->vcn.dpg_sram_cpu_addr);
vcn               196 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	if (adev->vcn.indirect_sram) {
vcn               197 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		amdgpu_bo_free_kernel(&adev->vcn.dpg_sram_bo,
vcn               198 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				      &adev->vcn.dpg_sram_gpu_addr,
vcn               199 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				      (void **)&adev->vcn.dpg_sram_cpu_addr);
vcn               202 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn               203 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.harvest_config & (1 << j))
vcn               205 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		kvfree(adev->vcn.inst[j].saved_bo);
vcn               207 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		amdgpu_bo_free_kernel(&adev->vcn.inst[j].vcpu_bo,
vcn               208 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 					  &adev->vcn.inst[j].gpu_addr,
vcn               209 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 					  (void **)&adev->vcn.inst[j].cpu_addr);
vcn               211 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		amdgpu_ring_fini(&adev->vcn.inst[j].ring_dec);
vcn               213 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i)
vcn               214 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			amdgpu_ring_fini(&adev->vcn.inst[j].ring_enc[i]);
vcn               216 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		amdgpu_ring_fini(&adev->vcn.inst[j].ring_jpeg);
vcn               219 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	release_firmware(adev->vcn.fw);
vcn               230 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	cancel_delayed_work_sync(&adev->vcn.idle_work);
vcn               232 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               233 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               235 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.inst[i].vcpu_bo == NULL)
vcn               238 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		size = amdgpu_bo_size(adev->vcn.inst[i].vcpu_bo);
vcn               239 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ptr = adev->vcn.inst[i].cpu_addr;
vcn               241 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		adev->vcn.inst[i].saved_bo = kvmalloc(size, GFP_KERNEL);
vcn               242 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (!adev->vcn.inst[i].saved_bo)
vcn               245 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		memcpy_fromio(adev->vcn.inst[i].saved_bo, ptr, size);
vcn               256 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               257 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               259 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.inst[i].vcpu_bo == NULL)
vcn               262 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		size = amdgpu_bo_size(adev->vcn.inst[i].vcpu_bo);
vcn               263 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ptr = adev->vcn.inst[i].cpu_addr;
vcn               265 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.inst[i].saved_bo != NULL) {
vcn               266 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			memcpy_toio(ptr, adev->vcn.inst[i].saved_bo, size);
vcn               267 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			kvfree(adev->vcn.inst[i].saved_bo);
vcn               268 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			adev->vcn.inst[i].saved_bo = NULL;
vcn               273 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			hdr = (const struct common_firmware_header *)adev->vcn.fw->data;
vcn               276 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 				memcpy_toio(adev->vcn.inst[i].cpu_addr, adev->vcn.fw->data + offset,
vcn               290 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		container_of(work, struct amdgpu_device, vcn.idle_work.work);
vcn               294 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn               295 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (adev->vcn.harvest_config & (1 << j))
vcn               297 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               298 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			fence[j] += amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_enc[i]);
vcn               309 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			if (amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_jpeg))
vcn               314 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			adev->vcn.pause_dpg_mode(adev, &new_state);
vcn               317 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		fence[j] += amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_jpeg);
vcn               318 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		fence[j] += amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_dec);
vcn               330 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		schedule_delayed_work(&adev->vcn.idle_work, VCN_IDLE_TIMEOUT);
vcn               337 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	bool set_clocks = !cancel_delayed_work_sync(&adev->vcn.idle_work);
vcn               353 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               354 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 			fences += amdgpu_fence_count_emitted(&adev->vcn.inst[ring->me].ring_enc[i]);
vcn               361 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		if (amdgpu_fence_count_emitted(&adev->vcn.inst[ring->me].ring_jpeg))
vcn               371 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		adev->vcn.pause_dpg_mode(adev, &new_state);
vcn               377 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	schedule_delayed_work(&ring->adev->vcn.idle_work, VCN_IDLE_TIMEOUT);
vcn               387 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	WREG32(adev->vcn.inst[ring->me].external.scratch9, 0xCAFEDEAD);
vcn               391 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.scratch9, 0));
vcn               395 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		tmp = RREG32(adev->vcn.inst[ring->me].external.scratch9);
vcn               424 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[0] = PACKET0(adev->vcn.internal.data0, 0);
vcn               426 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[2] = PACKET0(adev->vcn.internal.data1, 0);
vcn               428 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[4] = PACKET0(adev->vcn.internal.cmd, 0);
vcn               431 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i] = PACKET0(adev->vcn.internal.nop, 0);
vcn               717 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	WREG32(adev->vcn.inst[ring->me].external.jpeg_pitch, 0xCAFEDEAD);
vcn               722 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.jpeg_pitch, 0));
vcn               727 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		tmp = RREG32(adev->vcn.inst[ring->me].external.jpeg_pitch);
vcn               755 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[0] = PACKETJ(adev->vcn.internal.jpeg_pitch, 0, 0, PACKETJ_TYPE0);
vcn               801 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		tmp = RREG32(adev->vcn.inst[ring->me].external.jpeg_pitch);
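
The amdgpu_vcn.c suspend/resume lines above snapshot each instance's VCPU buffer object into kernel memory before power-down and copy it back on resume. Below is a sketch of that round trip under userspace assumptions: malloc/free stand in for kvmalloc/kvfree, and plain memcpy stands in for memcpy_fromio/memcpy_toio, which the driver needs because the BO is mapped VRAM.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct mock_inst {
        unsigned char *cpu_addr;    /* CPU view of the VCPU BO */
        unsigned char *saved_bo;    /* suspend-time snapshot */
        size_t size;
    };

    static int vcn_suspend_one(struct mock_inst *inst)
    {
        if (!inst->cpu_addr)
            return 0;                        /* no BO, nothing to save */
        inst->saved_bo = malloc(inst->size); /* kvmalloc(size, GFP_KERNEL) */
        if (!inst->saved_bo)
            return -1;                       /* -ENOMEM in the driver */
        memcpy(inst->saved_bo, inst->cpu_addr, inst->size); /* memcpy_fromio */
        return 0;
    }

    static void vcn_resume_one(struct mock_inst *inst)
    {
        if (!inst->saved_bo)
            return;    /* driver re-copies the firmware image instead */
        memcpy(inst->cpu_addr, inst->saved_bo, inst->size); /* memcpy_toio */
        free(inst->saved_bo);                               /* kvfree */
        inst->saved_bo = NULL;
    }

    int main(void)
    {
        unsigned char bo[8] = "vcpu-fw";
        struct mock_inst inst = { .cpu_addr = bo, .size = sizeof(bo) };

        if (vcn_suspend_one(&inst) == 0) {
            memset(bo, 0, sizeof(bo));      /* power gating loses state */
            vcn_resume_one(&inst);
        }
        printf("restored: %s\n", bo);
        return 0;
    }
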
vcn               121 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h 			*adev->vcn.dpg_sram_curr_addr++ = offset; 				\
vcn               122 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.h 			*adev->vcn.dpg_sram_curr_addr++ = value; 				\
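
The two amdgpu_vcn.h lines above are the heart of the indirect-SRAM write macro used in DPG mode: rather than touching a register directly, the driver appends an (offset, value) pair to a staging buffer, and vcn_v2_0.c later hands the filled buffer to psp_update_vcn_sram() with a byte count computed from the cursor (see the vcn_v2_0.c lines around psp_update_vcn_sram below). A hedged sketch of that staging pattern; the buffer size and register offsets here are invented.

    #include <stdint.h>
    #include <stdio.h>

    /* Staging buffer; in the driver this is dpg_sram_bo in VRAM and
     * dpg_sram_curr_addr is the running cursor into its CPU mapping. */
    static uint32_t dpg_sram[64];
    static uint32_t *dpg_sram_curr = dpg_sram;

    /* Shape of the macro body shown above: append one (offset, value) pair. */
    #define STAGE_REG(offset, value) do {   \
        *dpg_sram_curr++ = (offset);        \
        *dpg_sram_curr++ = (value);         \
    } while (0)

    int main(void)
    {
        STAGE_REG(0x01f0, 0xCAFEDEAD);  /* hypothetical register offsets */
        STAGE_REG(0x01f4, 0x00000001);

        /* vcn_v2_0.c sizes the PSP upload exactly this way: cursor
         * address minus buffer base, in bytes. */
        printf("staged %zu bytes for psp_update_vcn_sram()\n",
               (size_t)((uintptr_t)dpg_sram_curr - (uintptr_t)dpg_sram));
        return 0;
    }
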
vcn               553 drivers/gpu/drm/amd/amdgpu/nv.c 	adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_NAVI10_DOORBELL64_VCN0_1;
vcn               554 drivers/gpu/drm/amd/amdgpu/nv.c 	adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_NAVI10_DOORBELL64_VCN2_3;
vcn               555 drivers/gpu/drm/amd/amdgpu/nv.c 	adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_NAVI10_DOORBELL64_VCN4_5;
vcn               556 drivers/gpu/drm/amd/amdgpu/nv.c 	adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_NAVI10_DOORBELL64_VCN6_7;
vcn                66 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.num_vcn_inst = 1;
vcn                67 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.num_enc_rings = 2;
vcn                92 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			VCN_1_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT, &adev->vcn.inst->irq);
vcn                97 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn                99 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 					&adev->vcn.inst->irq);
vcn               105 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_VCN, 126, &adev->vcn.inst->irq);
vcn               115 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		hdr = (const struct common_firmware_header *)adev->vcn.fw->data;
vcn               117 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw;
vcn               127 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_dec;
vcn               129 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0);
vcn               133 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.internal.scratch9 = adev->vcn.inst->external.scratch9 =
vcn               135 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.internal.data0 = adev->vcn.inst->external.data0 =
vcn               137 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.internal.data1 = adev->vcn.inst->external.data1 =
vcn               139 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.internal.cmd = adev->vcn.inst->external.cmd =
vcn               141 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.internal.nop = adev->vcn.inst->external.nop =
vcn               144 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               145 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		ring = &adev->vcn.inst->ring_enc[i];
vcn               147 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0);
vcn               152 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn               154 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0);
vcn               158 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.pause_dpg_mode = vcn_v1_0_pause_dpg_mode;
vcn               159 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.internal.jpeg_pitch = adev->vcn.inst->external.jpeg_pitch =
vcn               196 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn               203 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               204 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		ring = &adev->vcn.inst->ring_enc[i];
vcn               211 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn               234 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn               296 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4);
vcn               309 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			lower_32_bits(adev->vcn.inst->gpu_addr));
vcn               311 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			upper_32_bits(adev->vcn.inst->gpu_addr));
vcn               321 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     lower_32_bits(adev->vcn.inst->gpu_addr + offset));
vcn               323 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     upper_32_bits(adev->vcn.inst->gpu_addr + offset));
vcn               329 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));
vcn               331 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));
vcn               363 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4);
vcn               379 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			lower_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0);
vcn               381 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			upper_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0);
vcn               391 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0);
vcn               393 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0);
vcn               401 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE),
vcn               404 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		     upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE),
vcn               784 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn               937 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_enc[0];
vcn               944 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_enc[1];
vcn               951 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn               973 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn              1111 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn              1235 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	if (adev->vcn.pause_state.fw_based != new_state->fw_based) {
vcn              1237 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			adev->vcn.pause_state.fw_based, adev->vcn.pause_state.jpeg,
vcn              1260 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 				ring = &adev->vcn.inst->ring_enc[0];
vcn              1267 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 				ring = &adev->vcn.inst->ring_enc[1];
vcn              1274 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 				ring = &adev->vcn.inst->ring_dec;
vcn              1286 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		adev->vcn.pause_state.fw_based = new_state->fw_based;
vcn              1290 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	if (adev->vcn.pause_state.jpeg != new_state->jpeg) {
vcn              1292 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 			adev->vcn.pause_state.fw_based, adev->vcn.pause_state.jpeg,
vcn              1320 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 				ring = &adev->vcn.inst->ring_jpeg;
vcn              1334 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 				ring = &adev->vcn.inst->ring_dec;
vcn              1346 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		adev->vcn.pause_state.jpeg = new_state->jpeg;
vcn              1601 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	if (ring == &adev->vcn.inst->ring_enc[0])
vcn              1618 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	if (ring == &adev->vcn.inst->ring_enc[0])
vcn              1635 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	if (ring == &adev->vcn.inst->ring_enc[0])
vcn              2119 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_dec);
vcn              2122 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_enc[0]);
vcn              2125 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_enc[1]);
vcn              2128 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_jpeg);
vcn              2165 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	if (state == adev->vcn.cur_state)
vcn              2174 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		adev->vcn.cur_state = state;
vcn              2300 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.inst->ring_dec.funcs = &vcn_v1_0_dec_ring_vm_funcs;
vcn              2308 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i)
vcn              2309 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 		adev->vcn.inst->ring_enc[i].funcs = &vcn_v1_0_enc_ring_vm_funcs;
vcn              2316 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.inst->ring_jpeg.funcs = &vcn_v1_0_jpeg_ring_vm_funcs;
vcn              2327 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.inst->irq.num_types = adev->vcn.num_enc_rings + 2;
vcn              2328 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	adev->vcn.inst->irq.funcs = &vcn_v1_0_irq_funcs;
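
Notice how vcn_v1_0.c records every scratch register twice: ring packets carry the firmware-visible internal offset, while the CPU uses the external SOC15 MMIO offset of the same register. That is what lets the dec ring test in amdgpu_vcn.c seed scratch9 over MMIO and then poll for the value a PACKET0 from the ring wrote. A toy model of that split address map; the PACKET0 encoding below is illustrative only, not the real soc15d.h layout.

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t regs[256];                      /* mock register file */
    static const uint32_t internal_scratch9 = 0x40; /* hypothetical offsets: */
    static const uint32_t external_scratch9 = 0x40; /* both name one register */

    /* Illustrative packet encoding (NOT the real soc15d.h PACKET0). */
    #define PACKET0(reg, n) (((uint32_t)(reg) << 8) | ((n) & 0xFF))

    /* "Firmware" side: decode the packet and perform the register write. */
    static void ring_execute(uint32_t pkt, uint32_t val)
    {
        regs[(pkt >> 8) & 0xFF] = val;
    }

    int main(void)
    {
        regs[external_scratch9] = 0xCAFEDEAD;           /* WREG32 seed */
        ring_execute(PACKET0(internal_scratch9, 0), 0xDEADBEEF);
        puts(regs[external_scratch9] == 0xDEADBEEF ?    /* RREG32 poll */
             "dec ring test: passed" : "dec ring test: timed out");
        return 0;
    }
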
vcn                95 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.num_vcn_inst = 1;
vcn                96 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.num_enc_rings = 2;
vcn               122 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			      &adev->vcn.inst->irq);
vcn               127 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               130 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 				      &adev->vcn.inst->irq);
vcn               137 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			      VCN_2_0__SRCID__JPEG_DECODE, &adev->vcn.inst->irq);
vcn               147 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		hdr = (const struct common_firmware_header *)adev->vcn.fw->data;
vcn               149 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw;
vcn               159 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring = &adev->vcn.inst->ring_dec;
vcn               162 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring->doorbell_index = adev->doorbell_index.vcn.vcn_ring0_1 << 1;
vcn               165 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0);
vcn               169 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.context_id = mmUVD_CONTEXT_ID_INTERNAL_OFFSET;
vcn               170 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.ib_vmid = mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET;
vcn               171 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.ib_bar_low = mmUVD_LMI_RBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET;
vcn               172 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.ib_bar_high = mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET;
vcn               173 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.ib_size = mmUVD_RBC_IB_SIZE_INTERNAL_OFFSET;
vcn               174 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.gp_scratch8 = mmUVD_GP_SCRATCH8_INTERNAL_OFFSET;
vcn               176 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.scratch9 = mmUVD_SCRATCH9_INTERNAL_OFFSET;
vcn               177 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->external.scratch9 = SOC15_REG_OFFSET(UVD, 0, mmUVD_SCRATCH9);
vcn               178 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.data0 = mmUVD_GPCOM_VCPU_DATA0_INTERNAL_OFFSET;
vcn               179 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->external.data0 = SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0);
vcn               180 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.data1 = mmUVD_GPCOM_VCPU_DATA1_INTERNAL_OFFSET;
vcn               181 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->external.data1 = SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1);
vcn               182 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.cmd = mmUVD_GPCOM_VCPU_CMD_INTERNAL_OFFSET;
vcn               183 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->external.cmd = SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD);
vcn               184 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.nop = mmUVD_NO_OP_INTERNAL_OFFSET;
vcn               185 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->external.nop = SOC15_REG_OFFSET(UVD, 0, mmUVD_NO_OP);
vcn               187 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               188 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		ring = &adev->vcn.inst->ring_enc[i];
vcn               190 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + i;
vcn               192 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0);
vcn               197 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn               199 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
vcn               201 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst->irq, 0);
vcn               205 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.pause_dpg_mode = vcn_v2_0_pause_dpg_mode;
vcn               207 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.internal.jpeg_pitch = mmUVD_JPEG_PITCH_INTERNAL_OFFSET;
vcn               208 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->external.jpeg_pitch = SOC15_REG_OFFSET(UVD, 0, mmUVD_JPEG_PITCH);
vcn               244 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn               257 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               258 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		ring = &adev->vcn.inst->ring_enc[i];
vcn               267 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn               293 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn               297 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	    (adev->vcn.cur_state != AMD_PG_STATE_GATE &&
vcn               303 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               304 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		ring = &adev->vcn.inst->ring_enc[i];
vcn               308 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring = &adev->vcn.inst->ring_jpeg;
vcn               365 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4);
vcn               378 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			lower_32_bits(adev->vcn.inst->gpu_addr));
vcn               380 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			upper_32_bits(adev->vcn.inst->gpu_addr));
vcn               390 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		lower_32_bits(adev->vcn.inst->gpu_addr + offset));
vcn               392 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		upper_32_bits(adev->vcn.inst->gpu_addr + offset));
vcn               398 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));
vcn               400 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));
vcn               410 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4);
vcn               436 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);
vcn               439 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);
vcn               457 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect);
vcn               460 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect);
vcn               477 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
vcn               480 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect);
vcn               668 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_jpeg;
vcn               930 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn               942 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		adev->vcn.dpg_sram_curr_addr = (uint32_t*)adev->vcn.dpg_sram_cpu_addr;
vcn              1016 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		psp_update_vcn_sram(adev, 0, adev->vcn.dpg_sram_gpu_addr,
vcn              1017 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 				    (uint32_t)((uintptr_t)adev->vcn.dpg_sram_curr_addr -
vcn              1018 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 					       (uintptr_t)adev->vcn.dpg_sram_cpu_addr));
vcn              1056 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	struct amdgpu_ring *ring = &adev->vcn.inst->ring_dec;
vcn              1065 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		r = vcn_v2_0_start_dpg_mode(adev, adev->vcn.indirect_sram);
vcn              1207 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring = &adev->vcn.inst->ring_enc[0];
vcn              1214 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	ring = &adev->vcn.inst->ring_enc[1];
vcn              1339 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	if (adev->vcn.pause_state.fw_based != new_state->fw_based) {
vcn              1341 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			adev->vcn.pause_state.fw_based,	new_state->fw_based);
vcn              1361 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 				ring = &adev->vcn.inst->ring_enc[0];
vcn              1368 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 				ring = &adev->vcn.inst->ring_enc[1];
vcn              1387 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		adev->vcn.pause_state.fw_based = new_state->fw_based;
vcn              1494 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0));
vcn              1496 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              1511 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              1530 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.nop, 0));
vcn              1549 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.context_id, 0));
vcn              1552 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0));
vcn              1555 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0));
vcn              1558 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              1561 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0));
vcn              1564 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0));
vcn              1567 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              1588 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.ib_vmid, 0));
vcn              1591 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring,	PACKET0(adev->vcn.internal.ib_bar_low, 0));
vcn              1593 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring,	PACKET0(adev->vcn.internal.ib_bar_high, 0));
vcn              1595 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring,	PACKET0(adev->vcn.internal.ib_size, 0));
vcn              1604 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0));
vcn              1607 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0));
vcn              1610 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.gp_scratch8, 0));
vcn              1613 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              1638 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data0, 0));
vcn              1641 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.data1, 0));
vcn              1644 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              1660 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	if (ring == &adev->vcn.inst->ring_enc[0])
vcn              1677 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	if (ring == &adev->vcn.inst->ring_enc[0]) {
vcn              1701 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	if (ring == &adev->vcn.inst->ring_enc[0]) {
vcn              2075 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_dec);
vcn              2078 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_enc[0]);
vcn              2081 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_enc[1]);
vcn              2084 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		amdgpu_fence_process(&adev->vcn.inst->ring_jpeg);
vcn              2102 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	WREG32(adev->vcn.inst[ring->me].external.scratch9, 0xCAFEDEAD);
vcn              2106 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.cmd, 0));
vcn              2108 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, PACKET0(adev->vcn.internal.scratch9, 0));
vcn              2112 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		tmp = RREG32(adev->vcn.inst[ring->me].external.scratch9);
vcn              2138 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	if (state == adev->vcn.cur_state)
vcn              2147 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		adev->vcn.cur_state = state;
vcn              2263 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->ring_dec.funcs = &vcn_v2_0_dec_ring_vm_funcs;
vcn              2271 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	for (i = 0; i < adev->vcn.num_enc_rings; ++i)
vcn              2272 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 		adev->vcn.inst->ring_enc[i].funcs = &vcn_v2_0_enc_ring_vm_funcs;
vcn              2279 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->ring_jpeg.funcs = &vcn_v2_0_jpeg_ring_vm_funcs;
vcn              2290 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->irq.num_types = adev->vcn.num_enc_rings + 2;
vcn              2291 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	adev->vcn.inst->irq.funcs = &vcn_v2_0_irq_funcs;
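
The doorbell assignments in vcn_v2_0.c above, and the per-instance ones in vcn_v2_5.c further down, suggest one layout: vcn_ring0_1 names a 64-bit doorbell slot and is doubled into a 32-bit index, after which the decode ring sits at +0, JPEG at +1, encode ring i at +2 + i, and vcn_v2_5.c strides whole instances by 8*j. That reading is inferred from the listing, not from hardware documentation; a quick check of the arithmetic:

    #include <stdio.h>

    int main(void)
    {
        unsigned int vcn_ring0_1 = 0x10;      /* hypothetical slot number */
        unsigned int base = vcn_ring0_1 << 1; /* 64-bit slot -> 32-bit index */

        for (unsigned int j = 0; j < 2; ++j)  /* e.g. two Arcturus instances */
            printf("inst %u: dec=%u jpeg=%u enc0=%u enc1=%u\n",
                   j, base + 8 * j, base + 8 * j + 1,
                   base + 8 * j + 2, base + 8 * j + 3);
        return 0;
    }
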
vcn                79 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.num_vcn_inst = VCN25_MAX_HW_INSTANCES_ARCTURUS;
vcn                80 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		for (i = 0; i < adev->vcn.num_vcn_inst; i++) {
vcn                83 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 				adev->vcn.harvest_config |= 1 << i;
vcn                86 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config == (AMDGPU_VCN_HARVEST_VCN0 |
vcn                91 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.num_vcn_inst = 1;
vcn                93 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	adev->vcn.num_enc_rings = 2;
vcn               116 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (j = 0; j < adev->vcn.num_vcn_inst; j++) {
vcn               117 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << j))
vcn               121 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 				VCN_2_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT, &adev->vcn.inst[j].irq);
vcn               126 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               128 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 				i + VCN_2_0__SRCID__UVD_ENC_GENERAL_PURPOSE, &adev->vcn.inst[j].irq);
vcn               135 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 				VCN_2_0__SRCID__JPEG_DECODE, &adev->vcn.inst[j].irq);
vcn               146 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		hdr = (const struct common_firmware_header *)adev->vcn.fw->data;
vcn               148 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw;
vcn               152 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.num_vcn_inst == VCN25_MAX_HW_INSTANCES_ARCTURUS) {
vcn               154 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			adev->firmware.ucode[AMDGPU_UCODE_ID_VCN1].fw = adev->vcn.fw;
vcn               165 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (j = 0; j < adev->vcn.num_vcn_inst; j++) {
vcn               166 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << j))
vcn               168 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.context_id = mmUVD_CONTEXT_ID_INTERNAL_OFFSET;
vcn               169 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.ib_vmid = mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET;
vcn               170 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.ib_bar_low = mmUVD_LMI_RBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET;
vcn               171 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.ib_bar_high = mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET;
vcn               172 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.ib_size = mmUVD_RBC_IB_SIZE_INTERNAL_OFFSET;
vcn               173 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.gp_scratch8 = mmUVD_GP_SCRATCH8_INTERNAL_OFFSET;
vcn               175 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.scratch9 = mmUVD_SCRATCH9_INTERNAL_OFFSET;
vcn               176 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[j].external.scratch9 = SOC15_REG_OFFSET(UVD, j, mmUVD_SCRATCH9);
vcn               177 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.data0 = mmUVD_GPCOM_VCPU_DATA0_INTERNAL_OFFSET;
vcn               178 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[j].external.data0 = SOC15_REG_OFFSET(UVD, j, mmUVD_GPCOM_VCPU_DATA0);
vcn               179 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.data1 = mmUVD_GPCOM_VCPU_DATA1_INTERNAL_OFFSET;
vcn               180 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[j].external.data1 = SOC15_REG_OFFSET(UVD, j, mmUVD_GPCOM_VCPU_DATA1);
vcn               181 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.cmd = mmUVD_GPCOM_VCPU_CMD_INTERNAL_OFFSET;
vcn               182 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[j].external.cmd = SOC15_REG_OFFSET(UVD, j, mmUVD_GPCOM_VCPU_CMD);
vcn               183 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.nop = mmUVD_NO_OP_INTERNAL_OFFSET;
vcn               184 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[j].external.nop = SOC15_REG_OFFSET(UVD, j, mmUVD_NO_OP);
vcn               186 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.internal.jpeg_pitch = mmUVD_JPEG_PITCH_INTERNAL_OFFSET;
vcn               187 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[j].external.jpeg_pitch = SOC15_REG_OFFSET(UVD, j, mmUVD_JPEG_PITCH);
vcn               189 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[j].ring_dec;
vcn               191 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 8*j;
vcn               193 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[j].irq, 0);
vcn               197 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               198 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			ring = &adev->vcn.inst[j].ring_enc[i];
vcn               200 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 2 + i + 8*j;
vcn               202 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[j].irq, 0);
vcn               207 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[j].ring_jpeg;
vcn               209 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1 + 8*j;
vcn               211 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[j].irq, 0);
vcn               253 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn               254 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << j))
vcn               256 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[j].ring_dec;
vcn               267 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn               268 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			ring = &adev->vcn.inst[j].ring_enc[i];
vcn               278 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[j].ring_jpeg;
vcn               305 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               306 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               308 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[i].ring_dec;
vcn               315 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		for (j = 0; j < adev->vcn.num_enc_rings; ++j) {
vcn               316 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			ring = &adev->vcn.inst[i].ring_enc[j];
vcn               320 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[i].ring_jpeg;
vcn               378 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->size + 4);
vcn               382 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               383 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               395 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 				lower_32_bits(adev->vcn.inst[i].gpu_addr));
vcn               397 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 				upper_32_bits(adev->vcn.inst[i].gpu_addr));
vcn               406 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			lower_32_bits(adev->vcn.inst[i].gpu_addr + offset));
vcn               408 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			upper_32_bits(adev->vcn.inst[i].gpu_addr + offset));
vcn               414 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));
vcn               416 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));
vcn               435 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               436 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               552 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               553 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               615 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               616 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               618 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[i].ring_jpeg;
vcn               687 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               688 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               717 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               718 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               732 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               733 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               781 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               782 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn               845 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[i].ring_dec;
vcn               867 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[i].ring_enc[0];
vcn               874 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		ring = &adev->vcn.inst[i].ring_enc[1];
vcn               895 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn               896 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn              1041 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	if (ring == &adev->vcn.inst[ring->me].ring_enc[0])
vcn              1058 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) {
vcn              1082 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) {
vcn              1213 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn              1214 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn              1216 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[i].ring_dec.funcs = &vcn_v2_5_dec_ring_vm_funcs;
vcn              1217 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[i].ring_dec.me = i;
vcn              1226 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (j = 0; j < adev->vcn.num_vcn_inst; ++j) {
vcn              1227 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << j))
vcn              1229 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
vcn              1230 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			adev->vcn.inst[j].ring_enc[i].funcs = &vcn_v2_5_enc_ring_vm_funcs;
vcn              1231 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 			adev->vcn.inst[j].ring_enc[i].me = j;
vcn              1241 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn              1242 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn              1244 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[i].ring_jpeg.funcs = &vcn_v2_5_jpeg_ring_vm_funcs;
vcn              1245 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[i].ring_jpeg.me = i;
vcn              1255 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn              1256 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn              1269 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn              1270 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn              1304 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	if (state == adev->vcn.cur_state)
vcn              1313 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.cur_state = state;
vcn              1348 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_dec);
vcn              1351 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[0]);
vcn              1354 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[1]);
vcn              1357 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_jpeg);
vcn              1377 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 	for (i = 0; i < adev->vcn.num_vcn_inst; ++i) {
vcn              1378 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		if (adev->vcn.harvest_config & (1 << i))
vcn              1380 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[i].irq.num_types = adev->vcn.num_enc_rings + 2;
vcn              1381 drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c 		adev->vcn.inst[i].irq.funcs = &vcn_v2_5_irq_funcs;
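
One detail worth flagging in the vcn_v2_5.c block above: every ring is stamped with the index of its owning instance (the .me assignments), so instance-agnostic helpers such as the ring tests in amdgpu_vcn.c can reach the right register bank through adev->vcn.inst[ring->me]. A compact sketch of that back-pointer idiom with mock types:

    #include <stdio.h>

    struct mock_ring { unsigned int me; };  /* owning instance index */
    struct mock_inst {
        struct mock_ring ring_dec;
        unsigned int scratch9;              /* per-instance register */
    };
    struct mock_vcn { struct mock_inst inst[2]; };

    /* Instance-agnostic helper: only needs the ring, not the instance. */
    static void poke_scratch(struct mock_vcn *vcn, struct mock_ring *ring,
                             unsigned int val)
    {
        vcn->inst[ring->me].scratch9 = val;
    }

    int main(void)
    {
        struct mock_vcn vcn = { 0 };
        for (unsigned int i = 0; i < 2; ++i)
            vcn.inst[i].ring_dec.me = i;    /* stamped at init time */

        poke_scratch(&vcn, &vcn.inst[1].ring_dec, 0xCAFEDEAD);
        printf("inst1 scratch9 = 0x%X\n", vcn.inst[1].scratch9);
        return 0;
    }
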
vcn                84 drivers/gpu/drm/amd/amdgpu/vega10_reg_init.c 	adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_DOORBELL64_VCN0_1;
vcn                85 drivers/gpu/drm/amd/amdgpu/vega10_reg_init.c 	adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_DOORBELL64_VCN2_3;
vcn                86 drivers/gpu/drm/amd/amdgpu/vega10_reg_init.c 	adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_DOORBELL64_VCN4_5;
vcn                87 drivers/gpu/drm/amd/amdgpu/vega10_reg_init.c 	adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_DOORBELL64_VCN6_7;
vcn                90 drivers/gpu/drm/amd/amdgpu/vega20_reg_init.c 	adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_VEGA20_DOORBELL64_VCN0_1;
vcn                91 drivers/gpu/drm/amd/amdgpu/vega20_reg_init.c 	adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_VEGA20_DOORBELL64_VCN2_3;
vcn                92 drivers/gpu/drm/amd/amdgpu/vega20_reg_init.c 	adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_VEGA20_DOORBELL64_VCN4_5;
vcn                93 drivers/gpu/drm/amd/amdgpu/vega20_reg_init.c 	adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_VEGA20_DOORBELL64_VCN6_7;
vcn               170 fs/ntfs/aops.c 	VCN vcn;
vcn               245 fs/ntfs/aops.c 			vcn = (VCN)iblock << blocksize_bits >>
vcn               256 fs/ntfs/aops.c 				while (rl->length && rl[1].vcn <= vcn)
vcn               258 fs/ntfs/aops.c 				lcn = ntfs_rl_vcn_to_lcn(rl, vcn);
vcn               286 fs/ntfs/aops.c 				err = ntfs_map_runlist(ni, vcn);
vcn               311 fs/ntfs/aops.c 					ni->type, (unsigned long long)vcn,
vcn               535 fs/ntfs/aops.c 	VCN vcn;
vcn               702 fs/ntfs/aops.c 		vcn = (VCN)block << blocksize_bits;
vcn               703 fs/ntfs/aops.c 		vcn_ofs = vcn & vol->cluster_size_mask;
vcn               704 fs/ntfs/aops.c 		vcn >>= vol->cluster_size_bits;
vcn               712 fs/ntfs/aops.c 			while (rl->length && rl[1].vcn <= vcn)
vcn               714 fs/ntfs/aops.c 			lcn = ntfs_rl_vcn_to_lcn(rl, vcn);
vcn               764 fs/ntfs/aops.c 			err = ntfs_map_runlist(ni, vcn);
vcn               791 fs/ntfs/aops.c 				ni->type, (unsigned long long)vcn,
vcn              1005 fs/ntfs/aops.c 			VCN vcn;
vcn              1011 fs/ntfs/aops.c 			vcn = (VCN)block << bh_size_bits;
vcn              1012 fs/ntfs/aops.c 			vcn_ofs = vcn & vol->cluster_size_mask;
vcn              1013 fs/ntfs/aops.c 			vcn >>= vol->cluster_size_bits;
vcn              1021 fs/ntfs/aops.c 				while (rl->length && rl[1].vcn <= vcn)
vcn              1023 fs/ntfs/aops.c 				lcn = ntfs_rl_vcn_to_lcn(rl, vcn);
vcn              1047 fs/ntfs/aops.c 					err2 = ntfs_map_runlist(ni, vcn);
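
The aops.c lines above keep repeating one piece of arithmetic: a logical block number becomes a byte offset (block << blocksize_bits), the position inside the cluster is masked off with cluster_size_mask, and the remainder shifted down by cluster_size_bits is the VCN, the virtual cluster number. A self-contained check of that split with example geometry (512-byte blocks, 4 KiB clusters):

    #include <stdio.h>

    typedef long long VCN;

    int main(void)
    {
        unsigned int blocksize_bits = 9;        /* 512-byte blocks */
        unsigned int cluster_size_bits = 12;    /* 4 KiB clusters */
        unsigned long long cluster_size_mask = (1ULL << cluster_size_bits) - 1;
        unsigned long long block = 27;

        /* The pattern from aops.c: byte offset first, then split. */
        VCN vcn = (VCN)block << blocksize_bits;
        unsigned long long vcn_ofs = vcn & cluster_size_mask;
        vcn >>= cluster_size_bits;

        /* block 27 = byte 13824 = cluster 3, offset 1536 */
        printf("block %llu -> vcn %lld, offset %llu in cluster\n",
               block, vcn, vcn_ofs);
        return 0;
    }
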
vcn                70 fs/ntfs/attrib.c int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn, ntfs_attr_search_ctx *ctx)
vcn                84 fs/ntfs/attrib.c 			(unsigned long long)vcn);
vcn               120 fs/ntfs/attrib.c 		if (vcn >= allocated_size_vcn || (a->type == ni->type &&
vcn               125 fs/ntfs/attrib.c 				<= vcn && end_vcn >= vcn))
vcn               153 fs/ntfs/attrib.c 				CASE_SENSITIVE, vcn, NULL, 0, ctx);
vcn               169 fs/ntfs/attrib.c 	if (unlikely(vcn && vcn >= end_vcn)) {
vcn               284 fs/ntfs/attrib.c int ntfs_map_runlist(ntfs_inode *ni, VCN vcn)
vcn               290 fs/ntfs/attrib.c 	if (likely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) <=
vcn               292 fs/ntfs/attrib.c 		err = ntfs_map_runlist_nolock(ni, vcn, NULL);
vcn               327 fs/ntfs/attrib.c LCN ntfs_attr_vcn_to_lcn_nolock(ntfs_inode *ni, const VCN vcn,
vcn               336 fs/ntfs/attrib.c 			ni->mft_no, (unsigned long long)vcn,
vcn               339 fs/ntfs/attrib.c 	BUG_ON(vcn < 0);
vcn               350 fs/ntfs/attrib.c 	lcn = ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn);
vcn               364 fs/ntfs/attrib.c 			if (unlikely(ntfs_rl_vcn_to_lcn(ni->runlist.rl, vcn) !=
vcn               371 fs/ntfs/attrib.c 		err = ntfs_map_runlist_nolock(ni, vcn, NULL);
vcn               450 fs/ntfs/attrib.c runlist_element *ntfs_attr_find_vcn_nolock(ntfs_inode *ni, const VCN vcn,
vcn               460 fs/ntfs/attrib.c 			ni->mft_no, (unsigned long long)vcn, ctx ? "" : "out");
vcn               462 fs/ntfs/attrib.c 	BUG_ON(vcn < 0);
vcn               473 fs/ntfs/attrib.c 	if (likely(rl && vcn >= rl[0].vcn)) {
vcn               475 fs/ntfs/attrib.c 			if (unlikely(vcn < rl[1].vcn)) {
vcn               503 fs/ntfs/attrib.c 			err = ntfs_map_runlist_nolock(ni, vcn, ctx);
vcn               730 fs/ntfs/attrib.c 		lcn = ntfs_rl_vcn_to_lcn(rl, rl->vcn);
vcn               732 fs/ntfs/attrib.c 				(unsigned long long)rl->vcn,
vcn              1893 fs/ntfs/attrib.c 	VCN vcn;
vcn              1997 fs/ntfs/attrib.c 	vcn = NInoNonResident(ni) ? allocated_size >> vol->cluster_size_bits :
vcn              2014 fs/ntfs/attrib.c 		vcn = 0;
vcn              2017 fs/ntfs/attrib.c 			CASE_SENSITIVE, vcn, NULL, 0, ctx);
vcn              2139 fs/ntfs/attrib.c 		BUG_ON(vcn);
vcn                49 fs/ntfs/attrib.h extern int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn,
vcn                51 fs/ntfs/attrib.h extern int ntfs_map_runlist(ntfs_inode *ni, VCN vcn);
vcn                53 fs/ntfs/attrib.h extern LCN ntfs_attr_vcn_to_lcn_nolock(ntfs_inode *ni, const VCN vcn,
vcn                57 fs/ntfs/attrib.h 		const VCN vcn, ntfs_attr_search_ctx *ctx);
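
attrib.c is where VCNs get resolved to LCNs, logical cluster numbers on disk. The idiom quoted again and again in this listing, "while (rl->length && rl[1].vcn <= vcn) rl++;" followed by ntfs_rl_vcn_to_lcn(), walks a runlist: an array of (vcn, lcn, length) extents terminated by a zero-length element. A simplified, self-contained version follows; the real helper in fs/ntfs also distinguishes unmapped-runlist and error markers beyond the single hole marker modelled here.

    #include <stdio.h>

    typedef long long VCN;
    typedef long long LCN;
    #define LCN_HOLE (-1LL)  /* sparse run; the full marker set is in runlist.h */

    /* Each run maps [vcn, vcn + length) of the attribute onto consecutive
     * clusters starting at lcn, or onto a hole. */
    typedef struct { VCN vcn; LCN lcn; long long length; } runlist_element;

    /* Simplified ntfs_rl_vcn_to_lcn(): linear offset within the run, or the
     * negative marker passed through for holes. */
    static LCN rl_vcn_to_lcn(const runlist_element *rl, VCN vcn)
    {
        return rl->lcn >= 0 ? rl->lcn + (vcn - rl->vcn) : rl->lcn;
    }

    int main(void)
    {
        runlist_element rl[] = {
            { .vcn = 0,  .lcn = 100,      .length = 16 },
            { .vcn = 16, .lcn = LCN_HOLE, .length = 8  },  /* sparse */
            { .vcn = 24, .lcn = 400,      .length = 8  },
            { .vcn = 32, .lcn = 0,        .length = 0  },  /* terminator */
        };
        const runlist_element *p = rl;
        VCN vcn = 26;

        /* The walk quoted throughout this listing. */
        while (p->length && p[1].vcn <= vcn)
            p++;
        printf("vcn %lld -> lcn %lld\n", vcn, rl_vcn_to_lcn(p, vcn));
        return 0;
    }
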
vcn               478 fs/ntfs/compress.c 	VCN vcn;
vcn               596 fs/ntfs/compress.c 	for (vcn = start_vcn, start_vcn += cb_clusters; vcn < start_vcn;
vcn               597 fs/ntfs/compress.c 			vcn++) {
vcn               607 fs/ntfs/compress.c 			while (rl->length && rl[1].vcn <= vcn)
vcn               609 fs/ntfs/compress.c 			lcn = ntfs_rl_vcn_to_lcn(rl, vcn);
vcn               613 fs/ntfs/compress.c 				(unsigned long long)vcn,
vcn               630 fs/ntfs/compress.c 			if (!ntfs_map_runlist(ni, vcn))
vcn               730 fs/ntfs/compress.c 	if (vcn == start_vcn - cb_clusters) {
vcn               773 fs/ntfs/compress.c 	} else if (vcn == start_vcn) {
vcn               143 fs/ntfs/debug.c 					(long long)(rl + i)->vcn, lcn_str[index],
vcn               149 fs/ntfs/debug.c 					(long long)(rl + i)->vcn,
vcn                79 fs/ntfs/dir.c  	VCN vcn, old_vcn;
vcn               291 fs/ntfs/dir.c  	vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8));
vcn               307 fs/ntfs/dir.c  	page = ntfs_map_page(ia_mapping, vcn <<
vcn               319 fs/ntfs/dir.c  	ia = (INDEX_ALLOCATION*)(kaddr + ((vcn <<
vcn               331 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no);
vcn               334 fs/ntfs/dir.c  	if (sle64_to_cpu(ia->index_block_vcn) != vcn) {
vcn               340 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no);
vcn               349 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no,
vcn               359 fs/ntfs/dir.c  				"driver.", (unsigned long long)vcn,
vcn               367 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no);
vcn               541 fs/ntfs/dir.c  		old_vcn = vcn;
vcn               542 fs/ntfs/dir.c  		vcn = sle64_to_cpup((sle64*)((u8*)ie +
vcn               544 fs/ntfs/dir.c  		if (vcn >= 0) {
vcn               548 fs/ntfs/dir.c  					PAGE_SHIFT == vcn <<
vcn               633 fs/ntfs/dir.c  	VCN vcn, old_vcn;
vcn               769 fs/ntfs/dir.c  	vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8));
vcn               785 fs/ntfs/dir.c  	page = ntfs_map_page(ia_mapping, vcn <<
vcn               797 fs/ntfs/dir.c  	ia = (INDEX_ALLOCATION*)(kaddr + ((vcn <<
vcn               809 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no);
vcn               812 fs/ntfs/dir.c  	if (sle64_to_cpu(ia->index_block_vcn) != vcn) {
vcn               818 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no);
vcn               827 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no,
vcn               837 fs/ntfs/dir.c  				"driver.", (unsigned long long)vcn,
vcn               845 fs/ntfs/dir.c  				(unsigned long long)vcn, dir_ni->mft_no);
vcn               951 fs/ntfs/dir.c  		old_vcn = vcn;
vcn               952 fs/ntfs/dir.c  		vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8));
vcn               953 fs/ntfs/dir.c  		if (vcn >= 0) {
vcn               957 fs/ntfs/dir.c  					PAGE_SHIFT == vcn <<
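
Both dir.c descent paths above read the child node's VCN from the last eight little-endian bytes of the index entry, then map the page holding that index block. The shift expressions are cut off mid-line by the search output; in the source the VCN is shifted by the inode's index vcn_size_bits before being split into a page index and an in-page offset, so take the constants below as assumptions. A sketch of just the page arithmetic (4 KiB pages, 512-byte VCN units, as NTFS uses when the cluster size exceeds the index block size):

    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    int main(void)
    {
        unsigned int vcn_size_bits = 9; /* assumed; real value is per-inode */
        long long vcn = 13;             /* child-node VCN read from the entry */

        unsigned long long byte_ofs = (unsigned long long)vcn << vcn_size_bits;
        unsigned long long page_index = byte_ofs >> PAGE_SHIFT;
        unsigned long long in_page = byte_ofs & ~PAGE_MASK;

        /* ntfs_map_page(ia_mapping, page_index); ia = kaddr + in_page; */
        printf("vcn %lld -> page %llu, offset %llu\n",
               vcn, page_index, in_page);
        return 0;
    }
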
vcn               573 fs/ntfs/file.c 	VCN vcn, highest_vcn = 0, cpos, cend, bh_cpos, bh_cend;
vcn               628 fs/ntfs/file.c 	vcn = lcn = -1;
vcn               706 fs/ntfs/file.c 		cdelta = bh_cpos - vcn;
vcn               836 fs/ntfs/file.c 			while (rl->length && rl[1].vcn <= bh_cpos)
vcn               845 fs/ntfs/file.c 				vcn = bh_cpos;
vcn               846 fs/ntfs/file.c 				vcn_len = rl[1].vcn - vcn;
vcn               857 fs/ntfs/file.c 				if (likely(vcn + vcn_len >= cend)) {
vcn              1042 fs/ntfs/file.c 		vcn = sle64_to_cpu(a->data.non_resident.lowest_vcn);
vcn              1043 fs/ntfs/file.c 		rl2 = ntfs_rl_find_vcn_nolock(rl, vcn);
vcn              1060 fs/ntfs/file.c 		mp_size = ntfs_get_size_for_mapping_pairs(vol, rl2, vcn,
vcn              1104 fs/ntfs/file.c 				mp_size, rl2, vcn, highest_vcn, NULL);
vcn              1159 fs/ntfs/file.c 		vcn = bh_cpos;
vcn              1168 fs/ntfs/file.c 		if (likely(vcn + vcn_len >= cend)) {
vcn              1304 fs/ntfs/file.c 					vcn, highest_vcn, NULL)) {
vcn              1779 fs/ntfs/file.c 		VCN vcn;
vcn              1789 fs/ntfs/file.c 			vcn = pos >> vol->cluster_size_bits;
vcn              1790 fs/ntfs/file.c 			if (vcn != last_vcn) {
vcn              1791 fs/ntfs/file.c 				last_vcn = vcn;
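
file.c:1789-1791 goes the other way: a byte position in the file is converted to its VCN with a right shift, and the per-cluster work is skipped while successive writes stay inside the same cluster (the last_vcn cache). A sketch, with the cluster size again an assumed sample value:

    #include <stdio.h>

    #define CLUSTER_SIZE_BITS 12                 /* assumed: 4 KiB clusters */

    int main(void)
    {
        long long last_vcn = -1, pos;

        for (pos = 0; pos < 3 * 4096; pos += 512) {
            long long vcn = pos >> CLUSTER_SIZE_BITS;

            if (vcn != last_vcn) {               /* crossed into a new cluster */
                last_vcn = vcn;
                printf("pos %lld enters vcn %lld\n", pos, vcn);
            }
        }
        return 0;
    }
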
vcn               108 fs/ntfs/index.c 	VCN vcn, old_vcn;
vcn               248 fs/ntfs/index.c 	vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8));
vcn               264 fs/ntfs/index.c 	page = ntfs_map_page(ia_mapping, vcn <<
vcn               276 fs/ntfs/index.c 	ia = (INDEX_ALLOCATION*)(kaddr + ((vcn <<
vcn               288 fs/ntfs/index.c 				(long long)vcn, idx_ni->mft_no);
vcn               291 fs/ntfs/index.c 	if (sle64_to_cpu(ia->index_block_vcn) != vcn) {
vcn               297 fs/ntfs/index.c 				(unsigned long long)vcn, idx_ni->mft_no);
vcn               305 fs/ntfs/index.c 				"driver bug.", (unsigned long long)vcn,
vcn               316 fs/ntfs/index.c 				"driver.", (unsigned long long)vcn,
vcn               324 fs/ntfs/index.c 				(unsigned long long)vcn, idx_ni->mft_no);
vcn               408 fs/ntfs/index.c 	old_vcn = vcn;
vcn               409 fs/ntfs/index.c 	vcn = sle64_to_cpup((sle64*)((u8*)ie + le16_to_cpu(ie->length) - 8));
vcn               410 fs/ntfs/index.c 	if (vcn >= 0) {
vcn               416 fs/ntfs/index.c 				PAGE_SHIFT == vcn <<
vcn               388 fs/ntfs/lcnalloc.c 					rl[rlpos].vcn = rl[rlpos - 1].vcn +
vcn               393 fs/ntfs/lcnalloc.c 					rl[rlpos].vcn = start_vcn;
vcn               726 fs/ntfs/lcnalloc.c 		rl[rlpos].vcn = rl[rlpos - 1].vcn + rl[rlpos - 1].length;
vcn               883 fs/ntfs/lcnalloc.c 	delta = start_vcn - rl->vcn;
vcn               916 fs/ntfs/lcnalloc.c 			VCN vcn;
vcn               919 fs/ntfs/lcnalloc.c 			vcn = rl->vcn;
vcn               920 fs/ntfs/lcnalloc.c 			rl = ntfs_attr_find_vcn_nolock(ni, vcn, ctx);
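
The lcnalloc.c lines show the invariant that cluster allocation maintains while appending runs (lines 388 and 726): each new element starts where the previous one ends, rl[n].vcn == rl[n-1].vcn + rl[n-1].length, so VCN space stays contiguous even when the allocated LCNs are scattered. Minimal sketch with invented values:

    #include <assert.h>
    #include <stdio.h>

    typedef struct { long long vcn, lcn, length; } runlist_element;

    int main(void)
    {
        runlist_element rl[3] = { { 0, 100, 4 } };   /* one run so far */
        int rlpos = 1;

        /* Append 6 newly allocated clusters found at LCN 500. */
        rl[rlpos].vcn = rl[rlpos - 1].vcn + rl[rlpos - 1].length;
        rl[rlpos].lcn = 500;
        rl[rlpos].length = 6;

        assert(rl[1].vcn == 4);   /* contiguous in VCN space */
        printf("run %d: vcn %lld, lcn %lld, len %lld\n",
               rlpos, rl[1].vcn, rl[1].lcn, rl[1].length);
        return 0;
    }
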
vcn               715 fs/ntfs/logfile.c 	VCN vcn, end_vcn;
vcn               739 fs/ntfs/logfile.c 	vcn = 0;
vcn               747 fs/ntfs/logfile.c 	if (unlikely(!rl || vcn < rl->vcn || !rl->length)) {
vcn               749 fs/ntfs/logfile.c 		err = ntfs_map_runlist_nolock(log_ni, vcn, NULL);
vcn               756 fs/ntfs/logfile.c 		BUG_ON(!rl || vcn < rl->vcn || !rl->length);
vcn               759 fs/ntfs/logfile.c 	while (rl->length && vcn >= rl[1].vcn)
vcn               772 fs/ntfs/logfile.c 			vcn = rl->vcn;
vcn               783 fs/ntfs/logfile.c 		if (rl[1].vcn > end_vcn)
vcn               784 fs/ntfs/logfile.c 			len = end_vcn - rl->vcn;
vcn               820 fs/ntfs/logfile.c 	} while ((++rl)->vcn < end_vcn);
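
logfile.c:759-820 is a complete runlist walk: position on the run containing the starting VCN, then consume run after run, clamping the final one against end_vcn (lines 783-784). A condensed sketch of that loop with made-up runs:

    #include <stdio.h>

    typedef struct { long long vcn, lcn, length; } runlist_element;

    int main(void)
    {
        runlist_element rl[] = {
            { 0, 100, 4 }, { 4, 500, 6 }, { 10, 900, 8 }, { 18, -3, 0 },
        };
        runlist_element *r = rl;
        long long vcn = 2, end_vcn = 12;

        while (r->length && vcn >= r[1].vcn)     /* seek run containing vcn */
            r++;
        do {
            /* Clamp the last run against end_vcn (cf. logfile.c:783-784). */
            long long stop = r[1].vcn > end_vcn ? end_vcn : r[1].vcn;

            printf("read %lld clusters starting at lcn %lld\n",
                   stop - vcn, r->lcn + (vcn - r->vcn));
            vcn = stop;
        } while ((++r)->vcn < end_vcn);
        return 0;
    }
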
vcn               525 fs/ntfs/mft.c  			VCN vcn;
vcn               531 fs/ntfs/mft.c  			vcn = ((VCN)mft_no << vol->mft_record_size_bits) +
vcn               533 fs/ntfs/mft.c  			vcn_ofs = vcn & vol->cluster_size_mask;
vcn               534 fs/ntfs/mft.c  			vcn >>= vol->cluster_size_bits;
vcn               546 fs/ntfs/mft.c  			while (rl->length && rl[1].vcn <= vcn)
vcn               548 fs/ntfs/mft.c  			lcn = ntfs_rl_vcn_to_lcn(rl, vcn);
vcn               718 fs/ntfs/mft.c  			VCN vcn;
vcn               724 fs/ntfs/mft.c  			vcn = ((VCN)ni->mft_no << vol->mft_record_size_bits) +
vcn               726 fs/ntfs/mft.c  			vcn_ofs = vcn & vol->cluster_size_mask;
vcn               727 fs/ntfs/mft.c  			vcn >>= vol->cluster_size_bits;
vcn               734 fs/ntfs/mft.c  			while (rl->length && rl[1].vcn <= vcn)
vcn               736 fs/ntfs/mft.c  			lcn = ntfs_rl_vcn_to_lcn(rl, vcn);
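
Both mft.c regions above (lines 525-534 and 718-727) locate an mft record inside the $MFT data attribute with the same arithmetic: the record number scaled up to a byte offset, the remainder within a cluster kept as vcn_ofs, and the quotient becoming the VCN to look up. The shift widths below are assumed sample values:

    #include <stdio.h>

    #define MFT_RECORD_SIZE_BITS 10   /* assumed: 1 KiB mft records */
    #define CLUSTER_SIZE_BITS    12   /* assumed: 4 KiB clusters */
    #define CLUSTER_SIZE_MASK    ((1LL << CLUSTER_SIZE_BITS) - 1)

    int main(void)
    {
        long long mft_no = 37;
        long long vcn = mft_no << MFT_RECORD_SIZE_BITS;  /* byte offset */
        long long vcn_ofs = vcn & CLUSTER_SIZE_MASK;     /* offset in cluster */

        vcn >>= CLUSTER_SIZE_BITS;                       /* now a real VCN */
        printf("record %lld: vcn %lld, offset %lld\n", mft_no, vcn, vcn_ofs);
        return 0;                     /* record 37: vcn 9, offset 1024 */
    }
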
vcn              1344 fs/ntfs/mft.c  		rl[1].vcn++;
vcn              1351 fs/ntfs/mft.c  		rl2 = ntfs_cluster_alloc(vol, rl[1].vcn, 1, lcn, DATA_ZONE,
vcn              1396 fs/ntfs/mft.c  			mftbmp_ni->name_len, CASE_SENSITIVE, rl[1].vcn, NULL,
vcn              1409 fs/ntfs/mft.c  		if (ll >= rl2->vcn)
vcn              1412 fs/ntfs/mft.c  	BUG_ON(ll < rl2->vcn);
vcn              1413 fs/ntfs/mft.c  	BUG_ON(ll >= rl2->vcn + rl2->length);
vcn              1456 fs/ntfs/mft.c  	a->data.non_resident.highest_vcn = cpu_to_sle64(rl[1].vcn - 1);
vcn              1495 fs/ntfs/mft.c  			mftbmp_ni->name_len, CASE_SENSITIVE, rl[1].vcn, NULL,
vcn              1513 fs/ntfs/mft.c  	a->data.non_resident.highest_vcn = cpu_to_sle64(rl[1].vcn - 2);
vcn              1518 fs/ntfs/mft.c  		rl[1].vcn--;
vcn              1775 fs/ntfs/mft.c  	old_last_vcn = rl[1].vcn;
vcn              1829 fs/ntfs/mft.c  			CASE_SENSITIVE, rl[1].vcn, NULL, 0, ctx);
vcn              1841 fs/ntfs/mft.c  		if (ll >= rl2->vcn)
vcn              1844 fs/ntfs/mft.c  	BUG_ON(ll < rl2->vcn);
vcn              1845 fs/ntfs/mft.c  	BUG_ON(ll >= rl2->vcn + rl2->length);
vcn              1893 fs/ntfs/mft.c  	a->data.non_resident.highest_vcn = cpu_to_sle64(rl[1].vcn - 1);
vcn              1934 fs/ntfs/mft.c  			CASE_SENSITIVE, rl[1].vcn, NULL, 0, ctx)) {
vcn               151 fs/ntfs/runlist.c 	if ((dst->vcn + dst->length) != src->vcn)
vcn               238 fs/ntfs/runlist.c 	dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn;
vcn               242 fs/ntfs/runlist.c 		dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length;
vcn               286 fs/ntfs/runlist.c 		disc = (src[0].vcn > 0);
vcn               296 fs/ntfs/runlist.c 		disc = (src[0].vcn > dst[loc - 1].vcn + merged_length);
vcn               325 fs/ntfs/runlist.c 	dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length;
vcn               328 fs/ntfs/runlist.c 		dst[marker].length = dst[marker + 1].vcn - dst[marker].vcn;
vcn               333 fs/ntfs/runlist.c 			dst[loc].vcn = dst[loc - 1].vcn + dst[loc - 1].length;
vcn               334 fs/ntfs/runlist.c 			dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn;
vcn               336 fs/ntfs/runlist.c 			dst[loc].vcn = 0;
vcn               337 fs/ntfs/runlist.c 			dst[loc].length = dst[loc + 1].vcn;
vcn               428 fs/ntfs/runlist.c 		dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length;
vcn               476 fs/ntfs/runlist.c 	dst[loc].length		= dst[loc+1].vcn       - dst[loc].vcn;
vcn               477 fs/ntfs/runlist.c 	dst[loc+ssize+1].vcn    = dst[loc+ssize].vcn   + dst[loc+ssize].length;
vcn               478 fs/ntfs/runlist.c 	dst[loc+ssize+1].length = dst[loc+ssize+2].vcn - dst[loc+ssize+1].vcn;
vcn               546 fs/ntfs/runlist.c 		if (unlikely(drl[0].vcn)) {
vcn               556 fs/ntfs/runlist.c 			drl[0].vcn = 0;
vcn               558 fs/ntfs/runlist.c 			drl[0].length = drl[1].vcn;
vcn               581 fs/ntfs/runlist.c 		if (drl[di].vcn + drl[di].length > srl[sstart].vcn)
vcn               587 fs/ntfs/runlist.c 	if ((drl[di].vcn == srl[si].vcn) && (drl[di].lcn >= 0) &&
vcn               600 fs/ntfs/runlist.c 		marker_vcn = srl[marker = send].vcn;
vcn               615 fs/ntfs/runlist.c 		  (drl[dins].vcn == srl[sstart].vcn));	     /* Start of hole */
vcn               617 fs/ntfs/runlist.c 		 ((drl[dins].vcn + drl[dins].length) <=      /* End of hole   */
vcn               618 fs/ntfs/runlist.c 		  (srl[send - 1].vcn + srl[send - 1].length)));
vcn               623 fs/ntfs/runlist.c 	if (marker && (drl[dins].vcn + drl[dins].length > srl[send - 1].vcn))
vcn               652 fs/ntfs/runlist.c 		if (drl[ds].vcn <= marker_vcn) {
vcn               655 fs/ntfs/runlist.c 			if (drl[ds].vcn == marker_vcn) {
vcn               682 fs/ntfs/runlist.c 					drl[ds].vcn = drl[ds - 1].vcn +
vcn               688 fs/ntfs/runlist.c 			drl[ds].length = marker_vcn - drl[ds].vcn;
vcn               693 fs/ntfs/runlist.c 			drl[ds].vcn = marker_vcn;
vcn               738 fs/ntfs/runlist.c 	VCN vcn;		/* Current vcn. */
vcn               758 fs/ntfs/runlist.c 	vcn = sle64_to_cpu(attr->data.non_resident.lowest_vcn);
vcn               769 fs/ntfs/runlist.c 	if (!vcn && !*buf)
vcn               778 fs/ntfs/runlist.c 	if (vcn) {
vcn               779 fs/ntfs/runlist.c 		rl->vcn = 0;
vcn               781 fs/ntfs/runlist.c 		rl->length = vcn;
vcn               804 fs/ntfs/runlist.c 		rl[rlpos].vcn = vcn;
vcn               838 fs/ntfs/runlist.c 		vcn += deltaxcn;
vcn               892 fs/ntfs/runlist.c 	if (unlikely(deltaxcn && vcn - 1 != deltaxcn)) {
vcn               924 fs/ntfs/runlist.c 				rl[rlpos].vcn = vcn;
vcn               925 fs/ntfs/runlist.c 				vcn += rl[rlpos].length = max_cluster -
vcn               944 fs/ntfs/runlist.c 	rl[rlpos].vcn = vcn;
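
ntfs_mapping_pairs_decompress() (runlist.c:738 onward) shows where the vcn fields come from in the first place: on-disk mapping pairs store only each run's length plus a delta-coded LCN, and the running total of lengths becomes the next element's vcn (lines 804 and 838). The pair values below are invented; the accumulation is the point:

    #include <stdio.h>

    typedef struct { long long vcn, lcn, length; } runlist_element;

    int main(void)
    {
        /* (length, lcn delta) pairs as decoded from the attribute record. */
        long long pairs[][2] = { { 4, 100 }, { 6, 400 }, { 8, -200 } };
        runlist_element rl[4];
        long long vcn = 0, lcn = 0;
        int i;

        for (i = 0; i < 3; i++) {
            rl[i].vcn = vcn;          /* cf. runlist.c:804 */
            lcn += pairs[i][1];       /* LCNs are delta-coded */
            rl[i].lcn = lcn;
            rl[i].length = pairs[i][0];
            vcn += pairs[i][0];       /* cf. runlist.c:838 */
        }
        rl[3] = (runlist_element){ vcn, -3, 0 };  /* terminator, cf. line 944 */

        for (i = 0; i < 4; i++)
            printf("run %d: vcn %lld, lcn %lld, len %lld\n",
                   i, rl[i].vcn, rl[i].lcn, rl[i].length);
        return 0;
    }
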
vcn               990 fs/ntfs/runlist.c LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn)
vcn               994 fs/ntfs/runlist.c 	BUG_ON(vcn < 0);
vcn              1004 fs/ntfs/runlist.c 	if (unlikely(vcn < rl[0].vcn))
vcn              1008 fs/ntfs/runlist.c 		if (unlikely(vcn < rl[i+1].vcn)) {
vcn              1010 fs/ntfs/runlist.c 				return rl[i].lcn + (vcn - rl[i].vcn);
vcn              1039 fs/ntfs/runlist.c runlist_element *ntfs_rl_find_vcn_nolock(runlist_element *rl, const VCN vcn)
vcn              1041 fs/ntfs/runlist.c 	BUG_ON(vcn < 0);
vcn              1042 fs/ntfs/runlist.c 	if (unlikely(!rl || vcn < rl[0].vcn))
vcn              1045 fs/ntfs/runlist.c 		if (unlikely(vcn < rl[1].vcn)) {
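
runlist.c:990 and 1039 are the two public lookup helpers also declared in runlist.h below: ntfs_rl_vcn_to_lcn() returns an LCN or a negative LCN_* code, while ntfs_rl_find_vcn_nolock() returns the containing element itself so the caller can see the run's extent. A sketch of the second form; the LCN_HOLE value is assumed from fs/ntfs/runlist.h and marks sparse runs:

    #include <stdio.h>

    typedef struct { long long vcn, lcn, length; } runlist_element;

    #define LCN_HOLE (-1)   /* assumed: sparse run, allocated VCNs, no LCN */

    static runlist_element *find_vcn(runlist_element *rl, long long vcn)
    {
        if (!rl || vcn < rl[0].vcn)   /* cf. runlist.c:1042 */
            return NULL;
        while (rl->length) {
            if (vcn < rl[1].vcn)      /* cf. runlist.c:1045 */
                return rl;
            rl++;
        }
        return NULL;
    }

    int main(void)
    {
        runlist_element rl[] = {
            { 0, 100, 4 }, { 4, LCN_HOLE, 6 }, { 10, -3, 0 },
        };
        runlist_element *e = find_vcn(rl, 7);

        if (e && e->lcn == LCN_HOLE)
            printf("vcn 7 falls in a sparse hole of %lld clusters\n",
                   e->length);
        return 0;
    }
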
vcn              1134 fs/ntfs/runlist.c 	while (rl->length && first_vcn >= rl[1].vcn)
vcn              1136 fs/ntfs/runlist.c 	if (unlikely((!rl->length && first_vcn > rl->vcn) ||
vcn              1137 fs/ntfs/runlist.c 			first_vcn < rl->vcn))
vcn              1143 fs/ntfs/runlist.c 	if (first_vcn > rl->vcn) {
vcn              1153 fs/ntfs/runlist.c 		if (unlikely(last_vcn >= 0 && rl[1].vcn > last_vcn)) {
vcn              1155 fs/ntfs/runlist.c 			if (unlikely(rl[1].vcn > s1))
vcn              1156 fs/ntfs/runlist.c 				length = s1 - rl->vcn;
vcn              1159 fs/ntfs/runlist.c 		delta = first_vcn - rl->vcn;
vcn              1189 fs/ntfs/runlist.c 		if (unlikely(last_vcn >= 0 && rl[1].vcn > last_vcn)) {
vcn              1191 fs/ntfs/runlist.c 			if (unlikely(rl[1].vcn > s1))
vcn              1192 fs/ntfs/runlist.c 				length = s1 - rl->vcn;
vcn              1333 fs/ntfs/runlist.c 	while (rl->length && first_vcn >= rl[1].vcn)
vcn              1335 fs/ntfs/runlist.c 	if (unlikely((!rl->length && first_vcn > rl->vcn) ||
vcn              1336 fs/ntfs/runlist.c 			first_vcn < rl->vcn))
vcn              1345 fs/ntfs/runlist.c 	if (first_vcn > rl->vcn) {
vcn              1355 fs/ntfs/runlist.c 		if (unlikely(last_vcn >= 0 && rl[1].vcn > last_vcn)) {
vcn              1357 fs/ntfs/runlist.c 			if (unlikely(rl[1].vcn > s1))
vcn              1358 fs/ntfs/runlist.c 				length = s1 - rl->vcn;
vcn              1361 fs/ntfs/runlist.c 		delta = first_vcn - rl->vcn;
vcn              1407 fs/ntfs/runlist.c 		if (unlikely(last_vcn >= 0 && rl[1].vcn > last_vcn)) {
vcn              1409 fs/ntfs/runlist.c 			if (unlikely(rl[1].vcn > s1))
vcn              1410 fs/ntfs/runlist.c 				length = s1 - rl->vcn;
vcn              1449 fs/ntfs/runlist.c 		*stop_vcn = rl->vcn;
vcn              1514 fs/ntfs/runlist.c 		rl[1].length = rl->vcn = 0;
vcn              1516 fs/ntfs/runlist.c 		rl[1].vcn = rl->length = new_length;
vcn              1520 fs/ntfs/runlist.c 	BUG_ON(new_length < rl->vcn);
vcn              1522 fs/ntfs/runlist.c 	while (likely(rl->length && new_length >= rl[1].vcn))
vcn              1539 fs/ntfs/runlist.c 		rl->length = new_length - rl->vcn;
vcn              1549 fs/ntfs/runlist.c 			rl->vcn = new_length;
vcn              1566 fs/ntfs/runlist.c 	} else if (likely(/* !rl->length && */ new_length > rl->vcn)) {
vcn              1574 fs/ntfs/runlist.c 			(rl - 1)->length = new_length - (rl - 1)->vcn;
vcn              1594 fs/ntfs/runlist.c 			rl->length = new_length - rl->vcn;
vcn              1599 fs/ntfs/runlist.c 		rl->vcn = new_length;
vcn              1652 fs/ntfs/runlist.c 	while (likely(rl->length && start >= rl[1].vcn))
vcn              1656 fs/ntfs/runlist.c 	while (likely(rl_end->length && end >= rl_end[1].vcn)) {
vcn              1666 fs/ntfs/runlist.c 	if (!rl_end->length && end > rl_end->vcn)
vcn              1683 fs/ntfs/runlist.c 		if (end <= rl[1].vcn) {
vcn              1689 fs/ntfs/runlist.c 		rl->length = end - rl->vcn;
vcn              1693 fs/ntfs/runlist.c 			rl->length = rl_end->vcn - rl->vcn;
vcn              1702 fs/ntfs/runlist.c 		if (end > rl->vcn) {
vcn              1703 fs/ntfs/runlist.c 			delta = end - rl->vcn;
vcn              1704 fs/ntfs/runlist.c 			rl->vcn = end;
vcn              1731 fs/ntfs/runlist.c 	if (start == rl->vcn) {
vcn              1747 fs/ntfs/runlist.c 		if (end >= rl[1].vcn) {
vcn              1775 fs/ntfs/runlist.c 		rl->vcn += length;
vcn              1793 fs/ntfs/runlist.c 		rl->length = start - rl->vcn;
vcn              1800 fs/ntfs/runlist.c 		rl->vcn = start;
vcn              1801 fs/ntfs/runlist.c 		rl->length = rl[1].vcn - start;
vcn              1815 fs/ntfs/runlist.c 	if (end >= rl[1].vcn) {
vcn              1820 fs/ntfs/runlist.c 		if (rl[1].length && end >= rl[2].vcn) {
vcn              1822 fs/ntfs/runlist.c 			rl->length = start - rl->vcn;
vcn              1824 fs/ntfs/runlist.c 			rl->vcn = start;
vcn              1839 fs/ntfs/runlist.c 		rl->length = start - rl->vcn;
vcn              1846 fs/ntfs/runlist.c 		delta = rl->vcn - start;
vcn              1847 fs/ntfs/runlist.c 		rl->vcn = start;
vcn              1876 fs/ntfs/runlist.c 	rl->length = start - rl->vcn;
vcn              1878 fs/ntfs/runlist.c 	rl->vcn = start;
vcn              1882 fs/ntfs/runlist.c 	delta = end - rl->vcn;
vcn              1883 fs/ntfs/runlist.c 	rl->vcn = end;
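
The closing stretch of runlist.c (lines 1514-1883) is truncation and hole punching. The essential truncate move, visible at lines 1522, 1539 and 1549, is: seek the run containing the new length, shrink it so its length becomes new_length - rl->vcn, and turn the following element into the terminator. A simplified sketch without the reallocation and expansion paths:

    #include <stdio.h>

    typedef struct { long long vcn, lcn, length; } runlist_element;

    static void truncate_runlist(runlist_element *rl, long long new_length)
    {
        while (rl->length && new_length >= rl[1].vcn)  /* seek, cf. line 1522 */
            rl++;
        if (new_length > rl->vcn) {
            rl->length = new_length - rl->vcn;         /* shrink, cf. line 1539 */
            rl++;
        }
        rl->vcn = new_length;                          /* terminate, cf. line 1549 */
        rl->lcn = -3;                                  /* LCN_ENOENT-style marker */
        rl->length = 0;
    }

    int main(void)
    {
        runlist_element rl[] = {
            { 0, 100, 4 }, { 4, 500, 6 }, { 10, 900, 8 }, { 18, -3, 0 },
        };
        truncate_runlist(rl, 7);
        printf("last run: vcn %lld, len %lld; list ends at vcn %lld\n",
               rl[1].vcn, rl[1].length, rl[2].vcn);
        return 0;    /* last run: vcn 4, len 3; list ends at vcn 7 */
    }
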
vcn                29 fs/ntfs/runlist.h 	VCN vcn;	/* vcn = Starting virtual cluster number. */
vcn                65 fs/ntfs/runlist.h extern LCN ntfs_rl_vcn_to_lcn(const runlist_element *rl, const VCN vcn);
vcn                70 fs/ntfs/runlist.h 		const VCN vcn);
vcn              1166 fs/ntfs/super.c 	rl2[0].vcn = 0;
vcn              1170 fs/ntfs/super.c 	rl2[1].vcn = rl2[0].length;
vcn              1183 fs/ntfs/super.c 		if (rl2[i].vcn != rl[i].vcn || rl2[i].lcn != rl[i].lcn ||
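
Finally, super.c:1166-1183 builds the expected two-run runlist for the $MFT mirror by hand (run 0 starting at vcn 0, the terminator at the run's length) and compares it element by element against the runlist decompressed from disk. The comparison reduces to the check below; the sample values are invented:

    #include <stdbool.h>
    #include <stdio.h>

    typedef struct { long long vcn, lcn, length; } runlist_element;

    static bool runlists_match(const runlist_element *a,
                               const runlist_element *b)
    {
        for (;; a++, b++) {
            if (a->vcn != b->vcn || a->lcn != b->lcn ||
                a->length != b->length)   /* cf. super.c:1183 */
                return false;
            if (!a->length)               /* both terminated together */
                return true;
        }
    }

    int main(void)
    {
        runlist_element x[] = { { 0, 8192, 2 }, { 2, -3, 0 } };
        runlist_element y[] = { { 0, 8192, 2 }, { 2, -3, 0 } };

        printf("match: %d\n", runlists_match(x, y));  /* prints 1 */
        return 0;
    }
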