ib                575 arch/ia64/include/asm/pal.h 			ib		: 1,	/* Internal bus error */
ib                743 arch/ia64/include/asm/pal.h #define pmci_bus_internal_error			pme_bus.ib
ib                410 arch/ia64/kernel/mca_drv.c 	if (!pbci || pbci->ib)
ib                700 arch/ia64/kernel/mca_drv.c 	if (pbci->ib)
ib                120 arch/s390/include/asm/idals.h 	struct idal_buffer *ib;
ib                125 arch/s390/include/asm/idals.h 	ib = kmalloc(struct_size(ib, data, nr_ptrs), GFP_DMA | GFP_KERNEL);
ib                126 arch/s390/include/asm/idals.h 	if (ib == NULL)
ib                128 arch/s390/include/asm/idals.h 	ib->size = size;
ib                129 arch/s390/include/asm/idals.h 	ib->page_order = page_order;
ib                132 arch/s390/include/asm/idals.h 			ib->data[i] = ib->data[i-1] + IDA_BLOCK_SIZE;
ib                135 arch/s390/include/asm/idals.h 		ib->data[i] = (void *)
ib                137 arch/s390/include/asm/idals.h 		if (ib->data[i] != NULL)
ib                142 arch/s390/include/asm/idals.h 			free_pages((unsigned long) ib->data[i],
ib                143 arch/s390/include/asm/idals.h 				   ib->page_order);
ib                145 arch/s390/include/asm/idals.h 		kfree(ib);
ib                148 arch/s390/include/asm/idals.h 	return ib;
ib                155 arch/s390/include/asm/idals.h idal_buffer_free(struct idal_buffer *ib)
ib                159 arch/s390/include/asm/idals.h 	nr_ptrs = (ib->size + IDA_BLOCK_SIZE - 1) >> IDA_SIZE_LOG;
ib                160 arch/s390/include/asm/idals.h 	nr_chunks = (4096 << ib->page_order) >> IDA_SIZE_LOG;
ib                162 arch/s390/include/asm/idals.h 		free_pages((unsigned long) ib->data[i], ib->page_order);
ib                163 arch/s390/include/asm/idals.h 	kfree(ib);
ib                170 arch/s390/include/asm/idals.h __idal_buffer_is_needed(struct idal_buffer *ib)
ib                172 arch/s390/include/asm/idals.h 	return ib->size > (4096ul << ib->page_order) ||
ib                173 arch/s390/include/asm/idals.h 		idal_is_needed(ib->data[0], ib->size);
ib                180 arch/s390/include/asm/idals.h idal_buffer_set_cda(struct idal_buffer *ib, struct ccw1 *ccw)
ib                182 arch/s390/include/asm/idals.h 	if (__idal_buffer_is_needed(ib)) {
ib                184 arch/s390/include/asm/idals.h 		ccw->cda = (u32)(addr_t) ib->data;
ib                188 arch/s390/include/asm/idals.h 		ccw->cda = (u32)(addr_t) ib->data[0];
ib                189 arch/s390/include/asm/idals.h 	ccw->count = ib->size;
ib                196 arch/s390/include/asm/idals.h idal_buffer_to_user(struct idal_buffer *ib, void __user *to, size_t count)
ib                201 arch/s390/include/asm/idals.h 	BUG_ON(count > ib->size);
ib                203 arch/s390/include/asm/idals.h 		left = copy_to_user(to, ib->data[i], IDA_BLOCK_SIZE);
ib                209 arch/s390/include/asm/idals.h 	return copy_to_user(to, ib->data[i], count);
ib                216 arch/s390/include/asm/idals.h idal_buffer_from_user(struct idal_buffer *ib, const void __user *from, size_t count)
ib                221 arch/s390/include/asm/idals.h 	BUG_ON(count > ib->size);
ib                223 arch/s390/include/asm/idals.h 		left = copy_from_user(ib->data[i], from, IDA_BLOCK_SIZE);
ib                229 arch/s390/include/asm/idals.h 	return copy_from_user(ib->data[i], from, count);
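The arch/s390/include/asm/idals.h hits above cover the whole idal_buffer helper set: chunked allocation and free, wiring a buffer into a CCW, and the user-space copy helpers. A minimal sketch of how a driver might chain them follows; the allocator name idal_buffer_alloc() and its ERR_PTR() failure convention are assumed from the allocation body shown above, and the snippet presumes s390 kernel context.

#include <linux/err.h>
#include <linux/errno.h>
#include <asm/idals.h>

/* Sketch only: stage a user buffer behind a channel-program CCW via an IDAL buffer. */
static int example_prepare_ccw(struct ccw1 *ccw, const void __user *ubuf,
			       size_t count)
{
	struct idal_buffer *ib;

	/* page_order 0: 4 KiB chunks, one IDA block each. */
	ib = idal_buffer_alloc(count, 0);
	if (IS_ERR(ib))
		return PTR_ERR(ib);

	/* Fill the chunks from user space; a non-zero return means a fault. */
	if (idal_buffer_from_user(ib, ubuf, count)) {
		idal_buffer_free(ib);
		return -EFAULT;
	}

	/* Point ccw->cda/count at the data (or at the IDA list when one is needed). */
	idal_buffer_set_cda(ib, ccw);
	return 0;
}
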
ib                 45 arch/s390/include/asm/sclp.h 	u8 ib : 1;
ib                529 drivers/acpi/hmat/hmat.c 	struct memory_initiator *ib;
ib                533 drivers/acpi/hmat/hmat.c 	ib = list_entry(b, struct memory_initiator, node);
ib                536 drivers/acpi/hmat/hmat.c 	set_bit(ib->processor_pxm, p_nodes);
ib                538 drivers/acpi/hmat/hmat.c 	return ia->processor_pxm - ib->processor_pxm;
ib                 47 drivers/firmware/tegra/bpmp-tegra186.c 		channel->ib = NULL;
ib                 51 drivers/firmware/tegra/bpmp-tegra186.c 	channel->ib = frame;
ib                152 drivers/firmware/tegra/bpmp-tegra210.c 	channel->ib = p;
ib                203 drivers/firmware/tegra/bpmp.c 		memcpy(data, channel->ib->data, size);
ib                209 drivers/firmware/tegra/bpmp.c 	*ret = channel->ib->code;
ib                402 drivers/firmware/tegra/bpmp.c 	unsigned long flags = channel->ib->flags;
ib                534 drivers/firmware/tegra/bpmp.c 	request = (struct mrq_ping_request *)channel->ib->data;
ib                669 drivers/firmware/tegra/bpmp.c 		tegra_bpmp_handle_mrq(bpmp, channel->ib->code, channel);
ib                431 drivers/gpu/drm/amd/amdgpu/amdgpu.h 		  unsigned size, struct amdgpu_ib *ib);
ib                432 drivers/gpu/drm/amd/amdgpu/amdgpu.h void amdgpu_ib_free(struct amdgpu_device *adev, struct amdgpu_ib *ib,
ib                606 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	struct amdgpu_ib *ib;
ib                631 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	ib = &job->ibs[0];
ib                632 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	memset(ib, 0, sizeof(struct amdgpu_ib));
ib                634 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	ib->gpu_addr = gpu_addr;
ib                635 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	ib->ptr = ib_cmd;
ib                636 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	ib->length_dw = ib_len;
ib                640 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	ret = amdgpu_ib_schedule(ring, 1, ib, job, &f);
ib                805 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			struct amdgpu_ib *ib;
ib                809 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			ib = &p->job->ibs[j];
ib                838 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				memcpy(ib->ptr, kptr, chunk_ib->ib_bytes);
ib                845 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				ib->ptr = (uint32_t *)kptr;
ib                951 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_ib *ib;
ib                956 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		ib = &parser->job->ibs[j];
ib                993 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 				   chunk_ib->ib_bytes : 0, ib);
ib                999 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		ib->gpu_addr = chunk_ib->va_start;
ib               1000 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		ib->length_dw = chunk_ib->ib_bytes / 4;
ib               1001 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		ib->flags = chunk_ib->flags;
ib                 36 drivers/gpu/drm/amd/amdgpu/amdgpu_display.h #define amdgpu_display_add_connector(adev, ci, sd, ct, ib, coi, h, r) (adev)->mode_info.funcs->add_connector((adev), (ci), (sd), (ct), (ib), (coi), (h), (r))
ib                 65 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 		  unsigned size, struct amdgpu_ib *ib)
ib                 71 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 				      &ib->sa_bo, size, 256);
ib                 77 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 		ib->ptr = amdgpu_sa_bo_cpu_addr(ib->sa_bo);
ib                 80 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 			ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo);
ib                 95 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c void amdgpu_ib_free(struct amdgpu_device *adev, struct amdgpu_ib *ib,
ib                 98 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 	amdgpu_sa_bo_free(adev, &ib->sa_bo, f);
ib                127 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 	struct amdgpu_ib *ib = &ibs[0];
ib                220 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 		ib = &ibs[i];
ib                223 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 		if ((ib->flags & AMDGPU_IB_FLAG_PREAMBLE) &&
ib                230 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 		amdgpu_ring_emit_ib(ring, job, ib, status);
ib                242 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 	if (ib->flags & AMDGPU_IB_FLAG_TC_WB_NOT_INVALIDATE)
ib                108 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib                110 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c 	while (ib->length_dw & ring->funcs->align_mask)
ib                111 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c 		ib->ptr[ib->length_dw++] = ring->funcs->nop;
ib                134 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h 			struct amdgpu_ib *ib,
ib                154 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h 	void (*pad_ib)(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
ib                231 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_parse_cs(r, p, ib) ((r)->funcs->parse_cs((p), (ib)))
ib                232 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_patch_cs_in_place(r, p, ib) ((r)->funcs->patch_cs_in_place((p), (ib)))
ib                238 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_emit_ib(r, job, ib, flags) ((r)->funcs->emit_ib((r), (job), (ib), (flags)))
ib                251 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_pad_ib(r, ib) ((r)->funcs->pad_ib((r), (ib)))
ib                258 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
ib                 76 drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h 	void (*emit_copy_buffer)(struct amdgpu_ib *ib,
ib                 91 drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h 	void (*emit_fill_buffer)(struct amdgpu_ib *ib,
ib                100 drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h #define amdgpu_emit_copy_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_copy_buffer((ib),  (s), (d), (b))
ib                101 drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib), (s), (d), (b))
ib                893 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_ib *ib = &ctx->parser->job->ibs[ctx->ib_idx];
ib                900 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		if (ctx->idx >= ib->length_dw) {
ib                940 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_ib *ib = &ctx->parser->job->ibs[ctx->ib_idx];
ib                943 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	for (ctx->idx = 0 ; ctx->idx < ib->length_dw; ) {
ib                982 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_ib *ib = &parser->job->ibs[ib_idx];
ib                986 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo);
ib                988 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	if (ib->length_dw % 16) {
ib                990 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			  ib->length_dw);
ib               1025 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_ib *ib;
ib               1061 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib = &job->ibs[0];
ib               1063 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->ptr[0] = data[0];
ib               1064 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->ptr[1] = addr;
ib               1065 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->ptr[2] = data[1];
ib               1066 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->ptr[3] = addr >> 32;
ib               1067 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->ptr[4] = data[2];
ib               1068 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->ptr[5] = 0;
ib               1070 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		ib->ptr[i] = data[3];
ib               1071 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		ib->ptr[i+1] = 0;
ib               1073 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ib->length_dw = 16;
ib                437 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_ib *ib;
ib                446 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib = &job->ibs[0];
ib                451 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->length_dw = 0;
ib                452 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x0000000c; /* len */
ib                453 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */
ib                454 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = handle;
ib                457 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[ib->length_dw++] = 0x00000040; /* len */
ib                459 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[ib->length_dw++] = 0x00000030; /* len */
ib                460 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */
ib                461 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                462 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000042;
ib                463 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x0000000a;
ib                464 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000001;
ib                465 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000080;
ib                466 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000060;
ib                467 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000100;
ib                468 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000100;
ib                469 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x0000000c;
ib                470 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                472 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[ib->length_dw++] = 0x00000000;
ib                473 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[ib->length_dw++] = 0x00000000;
ib                474 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[ib->length_dw++] = 0x00000000;
ib                475 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[ib->length_dw++] = 0x00000000;
ib                478 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000014; /* len */
ib                479 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x05000005; /* feedback buffer */
ib                480 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                481 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = addr;
ib                482 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000001;
ib                484 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                485 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[i] = 0x0;
ib                516 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_ib *ib;
ib                524 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib = &job->ibs[0];
ib                527 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->length_dw = 0;
ib                528 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x0000000c; /* len */
ib                529 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */
ib                530 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = handle;
ib                532 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000020; /* len */
ib                533 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000002; /* task info */
ib                534 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0xffffffff; /* next task info, set to 0xffffffff if no */
ib                535 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* destroy session */
ib                536 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                537 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                538 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0xffffffff; /* feedback is not needed, set to 0xffffffff and firmware will not output feedback */
ib                539 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                541 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x00000008; /* len */
ib                542 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->ptr[ib->length_dw++] = 0x02000001; /* destroy cmd */
ib                544 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                545 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		ib->ptr[i] = 0x0;
ib                711 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_ib *ib = &p->job->ibs[ib_idx];
ib                723 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo);
ib                725 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	for (idx = 0; idx < ib->length_dw;) {
ib                791 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	for (idx = 0; idx < ib->length_dw;) {
ib                947 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct amdgpu_ib *ib = &p->job->ibs[ib_idx];
ib                955 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	while (idx < ib->length_dw) {
ib               1016 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 		amdgpu_ib_free(p->adev, ib, NULL);
ib               1038 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 				struct amdgpu_ib *ib,
ib               1042 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1043 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1044 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                 70 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h 				struct amdgpu_ib *ib, uint32_t flags);
ib                414 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_ib *ib;
ib                422 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib = &job->ibs[0];
ib                424 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[0] = PACKET0(adev->vcn.internal.data0, 0);
ib                425 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[1] = addr;
ib                426 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[2] = PACKET0(adev->vcn.internal.data1, 0);
ib                427 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[3] = addr >> 32;
ib                428 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[4] = PACKET0(adev->vcn.internal.cmd, 0);
ib                429 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[5] = 0;
ib                431 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i] = PACKET0(adev->vcn.internal.nop, 0);
ib                432 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i+1] = 0;
ib                434 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->length_dw = 16;
ib                577 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_ib *ib;
ib                586 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib = &job->ibs[0];
ib                589 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->length_dw = 0;
ib                590 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000018;
ib                591 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* session info */
ib                592 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = handle;
ib                593 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                594 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = addr;
ib                595 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x0000000b;
ib                597 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000014;
ib                598 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000002; /* task info */
ib                599 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x0000001c;
ib                600 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                601 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                603 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000008;
ib                604 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x08000001; /* op initialize */
ib                606 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                607 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i] = 0x0;
ib                630 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_ib *ib;
ib                639 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib = &job->ibs[0];
ib                642 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->length_dw = 0;
ib                643 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000018;
ib                644 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000001;
ib                645 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = handle;
ib                646 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                647 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = addr;
ib                648 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x0000000b;
ib                650 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000014;
ib                651 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000002;
ib                652 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x0000001c;
ib                653 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                654 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                656 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x00000008;
ib                657 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[ib->length_dw++] = 0x08000002; /* op close session */
ib                659 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                660 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i] = 0x0;
ib                744 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	struct amdgpu_ib *ib;
ib                753 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib = &job->ibs[0];
ib                755 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[0] = PACKETJ(adev->vcn.internal.jpeg_pitch, 0, 0, PACKETJ_TYPE0);
ib                756 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->ptr[1] = 0xDEADBEEF;
ib                758 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i] = PACKETJ(0, 0, 0, PACKETJ_TYPE6);
ib                759 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 		ib->ptr[i+1] = 0;
ib                761 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c 	ib->length_dw = 16;
ib                160 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 	void (*copy_pte)(struct amdgpu_ib *ib,
ib                165 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 	void (*write_pte)(struct amdgpu_ib *ib, uint64_t pe,
ib                169 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h 	void (*set_pte_pde)(struct amdgpu_ib *ib,
ib                336 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h #define amdgpu_vm_copy_pte(adev, ib, pe, src, count) ((adev)->vm_manager.vm_pte_funcs->copy_pte((ib), (pe), (src), (count)))
ib                337 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h #define amdgpu_vm_write_pte(adev, ib, pe, value, count, incr) ((adev)->vm_manager.vm_pte_funcs->write_pte((ib), (pe), (value), (count), (incr)))
ib                338 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h #define amdgpu_vm_set_pte_pde(adev, ib, pe, addr, count, incr, flags) ((adev)->vm_manager.vm_pte_funcs->set_pte_pde((ib), (pe), (addr), (count), (incr), (flags)))
ib                 97 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	struct amdgpu_ib *ib = p->job->ibs;
ib                104 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	WARN_ON(ib->length_dw == 0);
ib                105 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	amdgpu_ring_pad_ib(ring, ib);
ib                106 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	WARN_ON(ib->length_dw > p->num_dw_left);
ib                138 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	struct amdgpu_ib *ib = p->job->ibs;
ib                139 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	uint64_t src = ib->gpu_addr;
ib                146 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	amdgpu_vm_copy_pte(p->adev, ib, pe, src, count);
ib                168 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 	struct amdgpu_ib *ib = p->job->ibs;
ib                173 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 		amdgpu_vm_write_pte(p->adev, ib, pe, addr | flags,
ib                176 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c 		amdgpu_vm_set_pte_pde(p->adev, ib, pe, addr,
ib                224 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 				  struct amdgpu_ib *ib,
ib                234 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
ib                235 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff);
ib                236 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                667 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	struct amdgpu_ib ib;
ib                681 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	memset(&ib, 0, sizeof(ib));
ib                682 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib                686 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE,
ib                688 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                689 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib.ptr[2] = upper_32_bits(gpu_addr);
ib                690 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib.ptr[3] = 1;
ib                691 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib.ptr[4] = 0xDEADBEEF;
ib                692 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib.length_dw = 5;
ib                693 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                711 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	amdgpu_ib_free(adev, &ib, NULL);
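The *_ring_test_ib() fragments above and below (cik_sdma, sdma_v2_4/v3_0, gfx_v6_0 through gfx_v10_0) all follow the same indirect-buffer lifecycle: amdgpu_ib_get() suballocates a small IB, ib.ptr/ib.length_dw are filled with engine packets, amdgpu_ib_schedule() submits it without a job, and amdgpu_ib_free() releases it once the fence settles. A condensed, hedged sketch of that pattern; dma_fence_wait_timeout()/dma_fence_put() are standard kernel fence API, while the function name and packet-copy parameters are illustrative only.

#include "amdgpu.h"

/* Sketch only: submit up to 64 caller-built dwords as a one-off test IB. */
static int example_submit_test_ib(struct amdgpu_device *adev,
				  struct amdgpu_ring *ring,
				  const uint32_t *packets, unsigned int ndw)
{
	struct amdgpu_ib ib;
	struct dma_fence *f = NULL;
	long r;

	memset(&ib, 0, sizeof(ib));
	r = amdgpu_ib_get(adev, NULL, 256, &ib);	/* 256-byte suballocation */
	if (r)
		return r;

	memcpy(ib.ptr, packets, ndw * sizeof(uint32_t));	/* caller keeps ndw <= 64 */
	ib.length_dw = ndw;

	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);	/* no job attached */
	if (r)
		goto err_free;

	/* >0: signalled, 0: timed out, <0: error. */
	r = dma_fence_wait_timeout(f, false, msecs_to_jiffies(100));
	if (r == 0)
		r = -ETIMEDOUT;
	else if (r > 0)
		r = 0;

err_free:
	amdgpu_ib_free(adev, &ib, NULL);
	dma_fence_put(f);
	return r;
}
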
ib                728 drivers/gpu/drm/amd/amdgpu/cik_sdma.c static void cik_sdma_vm_copy_pte(struct amdgpu_ib *ib,
ib                734 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,
ib                736 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = bytes;
ib                737 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib                738 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                739 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src);
ib                740 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                741 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                755 drivers/gpu/drm/amd/amdgpu/cik_sdma.c static void cik_sdma_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,
ib                761 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,
ib                763 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                764 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                765 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = ndw;
ib                767 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(value);
ib                768 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                785 drivers/gpu/drm/amd/amdgpu/cik_sdma.c static void cik_sdma_vm_set_pte_pde(struct amdgpu_ib *ib, uint64_t pe,
ib                790 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_GENERATE_PTE_PDE, 0, 0);
ib                791 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe); /* dst addr */
ib                792 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                793 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */
ib                794 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(flags);
ib                795 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(addr); /* value */
ib                796 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                797 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = incr; /* increment size */
ib                798 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = 0;
ib                799 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = count; /* number of entries */
ib                808 drivers/gpu/drm/amd/amdgpu/cik_sdma.c static void cik_sdma_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib                814 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	pad_count = (-ib->length_dw) & 7;
ib                817 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 			ib->ptr[ib->length_dw++] =
ib                821 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 			ib->ptr[ib->length_dw++] =
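The pad helpers above (cik_sdma_ring_pad_ib, and sdma_v2_4_ring_pad_ib later in this listing) round the IB up to an 8-dword boundary with NOPs; the count they use, (-ib->length_dw) & 7, is simply (8 - length_dw % 8) % 8 expressed through unsigned wrap-around. A standalone user-space check of that identity, for illustration only:

#include <assert.h>

int main(void)
{
	unsigned int length_dw;

	/* (-x) & 7 == number of dwords needed to reach the next multiple of 8. */
	for (length_dw = 0; length_dw < 64; length_dw++)
		assert(((-length_dw) & 7) == (8 - length_dw % 8) % 8);
	return 0;
}
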
ib               1313 drivers/gpu/drm/amd/amdgpu/cik_sdma.c static void cik_sdma_emit_copy_buffer(struct amdgpu_ib *ib,
ib               1318 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY, SDMA_COPY_SUB_OPCODE_LINEAR, 0);
ib               1319 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = byte_count;
ib               1320 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               1321 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);
ib               1322 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src_offset);
ib               1323 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1324 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1337 drivers/gpu/drm/amd/amdgpu/cik_sdma.c static void cik_sdma_emit_fill_buffer(struct amdgpu_ib *ib,
ib               1342 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_CONSTANT_FILL, 0, 0);
ib               1343 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1344 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1345 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = src_data;
ib               1346 drivers/gpu/drm/amd/amdgpu/cik_sdma.c 	ib->ptr[ib->length_dw++] = byte_count;
ib                491 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	struct amdgpu_ib ib;
ib                505 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	memset(&ib, 0, sizeof(ib));
ib                506 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib                512 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	ib.ptr[0] = PACKET3(PACKET3_SET_UCONFIG_REG, 1);
ib                513 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	ib.ptr[1] = ((scratch - PACKET3_SET_UCONFIG_REG_START));
ib                514 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	ib.ptr[2] = 0xDEADBEEF;
ib                515 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	ib.length_dw = 3;
ib                517 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                541 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               4454 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 				       struct amdgpu_ib *ib,
ib               4460 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	if (ib->flags & AMDGPU_IB_FLAG_CE)
ib               4465 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	control |= ib->length_dw | (vmid << 24);
ib               4467 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	if (amdgpu_mcbp && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
ib               4473 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 		if (!(ib->flags & AMDGPU_IB_FLAG_CE))
ib               4479 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	BUG_ON(ib->gpu_addr & 0x3); /* Dword align */
ib               4484 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 		lower_32_bits(ib->gpu_addr));
ib               4485 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               4491 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 					   struct amdgpu_ib *ib,
ib               4495 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	u32 control = INDIRECT_BUFFER_VALID | ib->length_dw | (vmid << 24);
ib               4507 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	if (ib->flags & AMDGPU_IB_FLAG_RESET_GDS_MAX_WAVE_ID) {
ib               4514 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	BUG_ON(ib->gpu_addr & 0x3); /* Dword align */
ib               4519 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 				lower_32_bits(ib->gpu_addr));
ib               4520 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1865 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 				  struct amdgpu_ib *ib,
ib               1877 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	if (ib->flags & AMDGPU_IB_FLAG_CE)
ib               1882 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	control |= ib->length_dw | (vmid << 24);
ib               1889 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               1890 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib               1906 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	struct amdgpu_ib ib;
ib               1917 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	memset(&ib, 0, sizeof(ib));
ib               1918 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib               1922 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
ib               1923 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_START));
ib               1924 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	ib.ptr[2] = 0xDEADBEEF;
ib               1925 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	ib.length_dw = 3;
ib               1927 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib               1945 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               2260 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 					struct amdgpu_ib *ib,
ib               2272 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	if (ib->flags & AMDGPU_IB_FLAG_CE)
ib               2277 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	control |= ib->length_dw | (vmid << 24);
ib               2284 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               2285 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib               2291 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 					  struct amdgpu_ib *ib,
ib               2295 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	u32 control = INDIRECT_BUFFER_VALID | ib->length_dw | (vmid << 24);
ib               2307 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	if (ib->flags & AMDGPU_IB_FLAG_RESET_GDS_MAX_WAVE_ID) {
ib               2318 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 					  (ib->gpu_addr & 0xFFFFFFFC));
ib               2319 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib               2355 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	struct amdgpu_ib ib;
ib               2366 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	memset(&ib, 0, sizeof(ib));
ib               2367 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib               2371 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	ib.ptr[0] = PACKET3(PACKET3_SET_UCONFIG_REG, 1);
ib               2372 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	ib.ptr[1] = ((scratch - PACKET3_SET_UCONFIG_REG_START));
ib               2373 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	ib.ptr[2] = 0xDEADBEEF;
ib               2374 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	ib.length_dw = 3;
ib               2376 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib               2394 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib                876 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	struct amdgpu_ib ib;
ib                890 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	memset(&ib, 0, sizeof(ib));
ib                891 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	r = amdgpu_ib_get(adev, NULL, 16, &ib);
ib                895 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[0] = PACKET3(PACKET3_WRITE_DATA, 3);
ib                896 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[1] = WRITE_DATA_DST_SEL(5) | WR_CONFIRM;
ib                897 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[2] = lower_32_bits(gpu_addr);
ib                898 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[3] = upper_32_bits(gpu_addr);
ib                899 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[4] = 0xDEADBEEF;
ib                900 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.length_dw = 5;
ib                902 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                921 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               1550 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	struct amdgpu_ib ib;
ib               1581 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	memset(&ib, 0, sizeof(ib));
ib               1582 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	r = amdgpu_ib_get(adev, NULL, total_size, &ib);
ib               1590 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[i + (vgpr_offset / 4)] = vgpr_init_compute_shader[i];
ib               1593 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[i + (sgpr_offset / 4)] = sgpr_init_compute_shader[i];
ib               1596 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.length_dw = 0;
ib               1601 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);
ib               1602 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = vgpr_init_regs[i] - PACKET3_SET_SH_REG_START;
ib               1603 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = vgpr_init_regs[i + 1];
ib               1606 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	gpu_addr = (ib.gpu_addr + (u64)vgpr_offset) >> 8;
ib               1607 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);
ib               1608 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = mmCOMPUTE_PGM_LO - PACKET3_SET_SH_REG_START;
ib               1609 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr);
ib               1610 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr);
ib               1613 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3);
ib               1614 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 8; /* x */
ib               1615 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 1; /* y */
ib               1616 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 1; /* z */
ib               1617 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] =
ib               1621 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_EVENT_WRITE, 0);
ib               1622 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = EVENT_TYPE(7) | EVENT_INDEX(4);
ib               1627 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);
ib               1628 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = sgpr1_init_regs[i] - PACKET3_SET_SH_REG_START;
ib               1629 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = sgpr1_init_regs[i + 1];
ib               1632 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	gpu_addr = (ib.gpu_addr + (u64)sgpr_offset) >> 8;
ib               1633 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);
ib               1634 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = mmCOMPUTE_PGM_LO - PACKET3_SET_SH_REG_START;
ib               1635 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr);
ib               1636 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr);
ib               1639 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3);
ib               1640 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 8; /* x */
ib               1641 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 1; /* y */
ib               1642 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 1; /* z */
ib               1643 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] =
ib               1647 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_EVENT_WRITE, 0);
ib               1648 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = EVENT_TYPE(7) | EVENT_INDEX(4);
ib               1653 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);
ib               1654 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = sgpr2_init_regs[i] - PACKET3_SET_SH_REG_START;
ib               1655 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		ib.ptr[ib.length_dw++] = sgpr2_init_regs[i + 1];
ib               1658 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	gpu_addr = (ib.gpu_addr + (u64)sgpr_offset) >> 8;
ib               1659 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);
ib               1660 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = mmCOMPUTE_PGM_LO - PACKET3_SET_SH_REG_START;
ib               1661 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr);
ib               1662 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr);
ib               1665 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3);
ib               1666 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 8; /* x */
ib               1667 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 1; /* y */
ib               1668 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = 1; /* z */
ib               1669 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] =
ib               1673 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_EVENT_WRITE, 0);
ib               1674 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	ib.ptr[ib.length_dw++] = EVENT_TYPE(7) | EVENT_INDEX(4);
ib               1677 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib               1704 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               6117 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 					struct amdgpu_ib *ib,
ib               6123 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	if (ib->flags & AMDGPU_IB_FLAG_CE)
ib               6128 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	control |= ib->length_dw | (vmid << 24);
ib               6130 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	if (amdgpu_sriov_vf(ring->adev) && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
ib               6133 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 		if (!(ib->flags & AMDGPU_IB_FLAG_CE))
ib               6142 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               6143 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib               6149 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 					  struct amdgpu_ib *ib,
ib               6153 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	u32 control = INDIRECT_BUFFER_VALID | ib->length_dw | (vmid << 24);
ib               6165 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	if (ib->flags & AMDGPU_IB_FLAG_RESET_GDS_MAX_WAVE_ID) {
ib               6176 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 				(ib->gpu_addr & 0xFFFFFFFC));
ib               6177 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib                879 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	struct amdgpu_ib ib;
ib                893 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	memset(&ib, 0, sizeof(ib));
ib                894 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	r = amdgpu_ib_get(adev, NULL, 16, &ib);
ib                898 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[0] = PACKET3(PACKET3_WRITE_DATA, 3);
ib                899 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[1] = WRITE_DATA_DST_SEL(5) | WR_CONFIRM;
ib                900 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[2] = lower_32_bits(gpu_addr);
ib                901 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[3] = upper_32_bits(gpu_addr);
ib                902 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[4] = 0xDEADBEEF;
ib                903 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.length_dw = 5;
ib                905 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                924 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               4266 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	struct amdgpu_ib ib;
ib               4291 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	memset(&ib, 0, sizeof(ib));
ib               4292 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	r = amdgpu_ib_get(adev, NULL, total_size, &ib);
ib               4300 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[i + (vgpr_offset / 4)] = vgpr_init_compute_shader[i];
ib               4303 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[i + (sgpr_offset / 4)] = sgpr_init_compute_shader[i];
ib               4306 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.length_dw = 0;
ib               4311 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);
ib               4312 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[ib.length_dw++] = SOC15_REG_ENTRY_OFFSET(vgpr_init_regs[i])
ib               4314 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[ib.length_dw++] = vgpr_init_regs[i].reg_value;
ib               4317 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	gpu_addr = (ib.gpu_addr + (u64)vgpr_offset) >> 8;
ib               4318 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);
ib               4319 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = SOC15_REG_OFFSET(GC, 0, mmCOMPUTE_PGM_LO)
ib               4321 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr);
ib               4322 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr);
ib               4325 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3);
ib               4326 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = 128; /* x */
ib               4327 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = 1; /* y */
ib               4328 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = 1; /* z */
ib               4329 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] =
ib               4333 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_EVENT_WRITE, 0);
ib               4334 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = EVENT_TYPE(7) | EVENT_INDEX(4);
ib               4339 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);
ib               4340 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[ib.length_dw++] = SOC15_REG_ENTRY_OFFSET(sgpr_init_regs[i])
ib               4342 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		ib.ptr[ib.length_dw++] = sgpr_init_regs[i].reg_value;
ib               4345 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	gpu_addr = (ib.gpu_addr + (u64)sgpr_offset) >> 8;
ib               4346 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);
ib               4347 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = SOC15_REG_OFFSET(GC, 0, mmCOMPUTE_PGM_LO)
ib               4349 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr);
ib               4350 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr);
ib               4353 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3);
ib               4354 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = 128; /* x */
ib               4355 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = 1; /* y */
ib               4356 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = 1; /* z */
ib               4357 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] =
ib               4361 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = PACKET3(PACKET3_EVENT_WRITE, 0);
ib               4362 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	ib.ptr[ib.length_dw++] = EVENT_TYPE(7) | EVENT_INDEX(4);
ib               4365 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib               4392 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               5013 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 					struct amdgpu_ib *ib,
ib               5019 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	if (ib->flags & AMDGPU_IB_FLAG_CE)
ib               5024 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	control |= ib->length_dw | (vmid << 24);
ib               5026 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	if (amdgpu_sriov_vf(ring->adev) && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) {
ib               5029 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		if (!(ib->flags & AMDGPU_IB_FLAG_CE))
ib               5034 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	BUG_ON(ib->gpu_addr & 0x3); /* Dword align */
ib               5039 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		lower_32_bits(ib->gpu_addr));
ib               5040 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               5046 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 					  struct amdgpu_ib *ib,
ib               5050 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	u32 control = INDIRECT_BUFFER_VALID | ib->length_dw | (vmid << 24);
ib               5062 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	if (ib->flags & AMDGPU_IB_FLAG_RESET_GDS_MAX_WAVE_ID) {
ib               5069 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	BUG_ON(ib->gpu_addr & 0x3); /* Dword align */
ib               5074 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 				lower_32_bits(ib->gpu_addr));
ib               5075 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                252 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 				   struct amdgpu_ib *ib,
ib                263 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
ib                264 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                265 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                602 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	struct amdgpu_ib ib;
ib                616 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	memset(&ib, 0, sizeof(ib));
ib                617 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib                621 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib                623 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                624 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[2] = upper_32_bits(gpu_addr);
ib                625 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1);
ib                626 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[4] = 0xDEADBEEF;
ib                627 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[5] = SDMA_PKT_HEADER_OP(SDMA_OP_NOP);
ib                628 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[6] = SDMA_PKT_HEADER_OP(SDMA_OP_NOP);
ib                629 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.ptr[7] = SDMA_PKT_HEADER_OP(SDMA_OP_NOP);
ib                630 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib.length_dw = 8;
ib                632 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                650 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	amdgpu_ib_free(adev, &ib, NULL);
ib                667 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c static void sdma_v2_4_vm_copy_pte(struct amdgpu_ib *ib,
ib                673 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib                675 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = bytes;
ib                676 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib                677 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                678 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src);
ib                679 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                680 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                694 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c static void sdma_v2_4_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,
ib                700 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib                702 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = pe;
ib                703 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                704 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = ndw;
ib                706 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 		ib->ptr[ib->length_dw++] = lower_32_bits(value);
ib                707 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                724 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c static void sdma_v2_4_vm_set_pte_pde(struct amdgpu_ib *ib, uint64_t pe,
ib                729 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_GEN_PTEPDE);
ib                730 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe); /* dst addr */
ib                731 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                732 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */
ib                733 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(flags);
ib                734 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(addr); /* value */
ib                735 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                736 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = incr; /* increment size */
ib                737 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = 0;
ib                738 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = count; /* number of entries */
ib                747 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c static void sdma_v2_4_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib                753 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	pad_count = (-ib->length_dw) & 7;
ib                756 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 			ib->ptr[ib->length_dw++] =
ib                760 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 			ib->ptr[ib->length_dw++] =
ib               1200 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c static void sdma_v2_4_emit_copy_buffer(struct amdgpu_ib *ib,
ib               1205 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib               1207 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = byte_count;
ib               1208 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               1209 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);
ib               1210 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src_offset);
ib               1211 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1212 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1225 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c static void sdma_v2_4_emit_fill_buffer(struct amdgpu_ib *ib,
ib               1230 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_CONST_FILL);
ib               1231 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1232 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1233 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = src_data;
ib               1234 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c 	ib->ptr[ib->length_dw++] = byte_count;
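The sdma_v2_4_ring_pad_ib lines above only show the dwords that touch ib, but the rule they implement is visible: the indirect buffer is padded with NOP packets until length_dw is a multiple of 8. A minimal user-space sketch of that alignment step, using a hypothetical fake_ib struct and a placeholder NOP value instead of the real struct amdgpu_ib and SDMA_PKT_HEADER_OP macro:

#include <stdint.h>
#include <stdio.h>

/* hypothetical stand-in for struct amdgpu_ib; not the kernel type */
struct fake_ib {
	uint32_t ptr[64];
	uint32_t length_dw;
};

#define FAKE_SDMA_NOP 0x00000000u	/* placeholder, not the real SDMA_OP_NOP encoding */

/* pad the IB with NOPs so length_dw becomes a multiple of 8 dwords */
static void pad_ib_to_8(struct fake_ib *ib)
{
	uint32_t pad_count = (-ib->length_dw) & 7;	/* dwords to the next multiple of 8 */

	while (pad_count--)
		ib->ptr[ib->length_dw++] = FAKE_SDMA_NOP;
}

int main(void)
{
	struct fake_ib ib = { .length_dw = 5 };

	pad_ib_to_8(&ib);
	printf("padded length: %u dwords\n", ib.length_dw);	/* prints 8 */
	return 0;
}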
ib                426 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 				   struct amdgpu_ib *ib,
ib                437 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
ib                438 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                439 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                874 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	struct amdgpu_ib ib;
ib                888 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	memset(&ib, 0, sizeof(ib));
ib                889 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib                893 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib                895 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                896 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[2] = upper_32_bits(gpu_addr);
ib                897 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1);
ib                898 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[4] = 0xDEADBEEF;
ib                899 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[5] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib                900 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[6] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib                901 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.ptr[7] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib                902 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib.length_dw = 8;
ib                904 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                921 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib                938 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c static void sdma_v3_0_vm_copy_pte(struct amdgpu_ib *ib,
ib                944 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib                946 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = bytes;
ib                947 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib                948 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                949 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src);
ib                950 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                951 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                965 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c static void sdma_v3_0_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,
ib                971 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib                973 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                974 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                975 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = ndw;
ib                977 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 		ib->ptr[ib->length_dw++] = lower_32_bits(value);
ib                978 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                995 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c static void sdma_v3_0_vm_set_pte_pde(struct amdgpu_ib *ib, uint64_t pe,
ib               1000 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_GEN_PTEPDE);
ib               1001 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe); /* dst addr */
ib               1002 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1003 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */
ib               1004 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(flags);
ib               1005 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(addr); /* value */
ib               1006 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib               1007 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = incr; /* increment size */
ib               1008 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = 0;
ib               1009 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = count; /* number of entries */
ib               1018 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c static void sdma_v3_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib               1024 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	pad_count = (-ib->length_dw) & 7;
ib               1027 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 			ib->ptr[ib->length_dw++] =
ib               1031 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 			ib->ptr[ib->length_dw++] =
ib               1638 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c static void sdma_v3_0_emit_copy_buffer(struct amdgpu_ib *ib,
ib               1643 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib               1645 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = byte_count;
ib               1646 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               1647 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);
ib               1648 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src_offset);
ib               1649 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1650 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1663 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c static void sdma_v3_0_emit_fill_buffer(struct amdgpu_ib *ib,
ib               1668 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_CONST_FILL);
ib               1669 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1670 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1671 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = src_data;
ib               1672 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c 	ib->ptr[ib->length_dw++] = byte_count;
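The sdma_v3_0 ib_test lines show the pattern every SDMA generation reuses: an 8-dword IB that writes 0xDEADBEEF to a scratch GPU address, then NOP padding, scheduled once and polled by the CPU. A rough user-space sketch of building that buffer, with invented FAKE_* opcode values standing in for the real SDMA_PKT_* macros:

#include <stdint.h>
#include <stdio.h>

/* invented placeholder encodings; the real values come from the SDMA packet headers */
#define FAKE_OP_WRITE_UNTILED	0x00000002u
#define FAKE_OP_NOP		0x00000000u

/* build the 8-dword "write one test dword" IB seen in the SDMA ib_test functions */
static unsigned build_test_ib(uint32_t *ib, uint64_t gpu_addr)
{
	unsigned n = 0;

	ib[n++] = FAKE_OP_WRITE_UNTILED;	/* write-untiled header */
	ib[n++] = (uint32_t)gpu_addr;		/* destination, low 32 bits */
	ib[n++] = (uint32_t)(gpu_addr >> 32);	/* destination, high 32 bits */
	ib[n++] = 0;				/* dword count field (1 or 0 for one dword, depending on generation) */
	ib[n++] = 0xDEADBEEF;			/* test pattern the CPU later polls for */
	ib[n++] = FAKE_OP_NOP;			/* pad to 8 dwords */
	ib[n++] = FAKE_OP_NOP;
	ib[n++] = FAKE_OP_NOP;
	return n;
}

int main(void)
{
	uint32_t ib[8];
	unsigned i, len = build_test_ib(ib, 0x100000000ULL);

	for (i = 0; i < len; i++)
		printf("dw[%u] = 0x%08X\n", i, ib[i]);
	return 0;
}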
ib                695 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 				   struct amdgpu_ib *ib,
ib                706 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
ib                707 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                708 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1427 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	struct amdgpu_ib ib;
ib               1441 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	memset(&ib, 0, sizeof(ib));
ib               1442 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib               1446 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib               1448 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib               1449 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[2] = upper_32_bits(gpu_addr);
ib               1450 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0);
ib               1451 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[4] = 0xDEADBEEF;
ib               1452 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[5] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib               1453 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[6] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib               1454 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.ptr[7] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib               1455 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib.length_dw = 8;
ib               1457 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib               1475 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               1493 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c static void sdma_v4_0_vm_copy_pte(struct amdgpu_ib *ib,
ib               1499 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib               1501 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = bytes - 1;
ib               1502 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               1503 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib               1504 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src);
ib               1505 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib               1506 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1522 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c static void sdma_v4_0_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,
ib               1528 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib               1530 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib               1531 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1532 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = ndw - 1;
ib               1534 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 		ib->ptr[ib->length_dw++] = lower_32_bits(value);
ib               1535 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib               1552 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c static void sdma_v4_0_vm_set_pte_pde(struct amdgpu_ib *ib,
ib               1558 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_PTEPDE);
ib               1559 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe); /* dst addr */
ib               1560 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1561 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */
ib               1562 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(flags);
ib               1563 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(addr); /* value */
ib               1564 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib               1565 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = incr; /* increment size */
ib               1566 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = 0;
ib               1567 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = count - 1; /* number of entries */
ib               1576 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c static void sdma_v4_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib               1582 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	pad_count = (-ib->length_dw) & 7;
ib               1585 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 			ib->ptr[ib->length_dw++] =
ib               1589 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 			ib->ptr[ib->length_dw++] =
ib               2460 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c static void sdma_v4_0_emit_copy_buffer(struct amdgpu_ib *ib,
ib               2465 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib               2467 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = byte_count - 1;
ib               2468 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               2469 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);
ib               2470 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src_offset);
ib               2471 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               2472 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               2485 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c static void sdma_v4_0_emit_fill_buffer(struct amdgpu_ib *ib,
ib               2490 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_CONST_FILL);
ib               2491 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               2492 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               2493 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = src_data;
ib               2494 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	ib->ptr[ib->length_dw++] = byte_count - 1;
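Comparing the sdma_v4_0 entries with the 2.4/3.0 ones above shows a convention change: byte counts, write lengths and entry counts are now programmed as value - 1 rather than the value itself, which reads as the hardware fields becoming zero-based on this generation (an inference from the listing, not a statement from the docs). Two tiny helpers make the difference explicit; the names are invented for this sketch:

#include <assert.h>
#include <stdint.h>

static uint32_t sdma_v3_count_field(uint32_t entries)
{
	return entries;		/* e.g. sdma_v3_0_vm_set_pte_pde stores count */
}

static uint32_t sdma_v4_count_field(uint32_t entries)
{
	return entries - 1;	/* e.g. sdma_v4_0_vm_set_pte_pde stores count - 1 */
}

int main(void)
{
	assert(sdma_v3_count_field(512) == 512);
	assert(sdma_v4_count_field(512) == 511);
	return 0;
}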
ib                379 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 				   struct amdgpu_ib *ib,
ib                410 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
ib                411 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                412 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                955 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	struct amdgpu_ib ib;
ib                971 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	memset(&ib, 0, sizeof(ib));
ib                972 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib                978 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib                980 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                981 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[2] = upper_32_bits(gpu_addr);
ib                982 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(0);
ib                983 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[4] = 0xDEADBEEF;
ib                984 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[5] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib                985 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[6] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib                986 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.ptr[7] = SDMA_PKT_NOP_HEADER_OP(SDMA_OP_NOP);
ib                987 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib.length_dw = 8;
ib                989 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib               1012 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	amdgpu_ib_free(adev, &ib, NULL);
ib               1030 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c static void sdma_v5_0_vm_copy_pte(struct amdgpu_ib *ib,
ib               1036 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib               1038 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = bytes - 1;
ib               1039 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               1040 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib               1041 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src);
ib               1042 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib               1043 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1059 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c static void sdma_v5_0_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,
ib               1065 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
ib               1067 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib               1068 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1069 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = ndw - 1;
ib               1071 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 		ib->ptr[ib->length_dw++] = lower_32_bits(value);
ib               1072 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib               1089 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c static void sdma_v5_0_vm_set_pte_pde(struct amdgpu_ib *ib,
ib               1095 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_PTEPDE);
ib               1096 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe); /* dst addr */
ib               1097 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib               1098 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */
ib               1099 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(flags);
ib               1100 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(addr); /* value */
ib               1101 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib               1102 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = incr; /* increment size */
ib               1103 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = 0;
ib               1104 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = count - 1; /* number of entries */
ib               1113 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c static void sdma_v5_0_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib               1119 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	pad_count = (-ib->length_dw) & 0x7;
ib               1122 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 			ib->ptr[ib->length_dw++] =
ib               1126 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 			ib->ptr[ib->length_dw++] =
ib               1679 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c static void sdma_v5_0_emit_copy_buffer(struct amdgpu_ib *ib,
ib               1684 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |
ib               1686 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = byte_count - 1;
ib               1687 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib               1688 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);
ib               1689 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src_offset);
ib               1690 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1691 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1704 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c static void sdma_v5_0_emit_fill_buffer(struct amdgpu_ib *ib,
ib               1709 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_CONST_FILL);
ib               1710 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib               1711 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset);
ib               1712 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = src_data;
ib               1713 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c 	ib->ptr[ib->length_dw++] = byte_count - 1;
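The sdma_v5_0_vm_set_pte_pde lines spell out the ten-dword PTEPDE packet: header, destination page-table address, mask, starting value, increment, a reserved dword and a zero-based entry count. A compact sketch of that layout with an invented header constant:

#include <stdint.h>
#include <stdio.h>

#define FAKE_OP_PTEPDE	0x0000000Cu	/* placeholder for SDMA_PKT_HEADER_OP(SDMA_OP_PTEPDE) */

/* append the ten-dword set_pte_pde packet to buf, returning the new length */
static unsigned emit_set_pte_pde(uint32_t *buf, unsigned len,
				 uint64_t pe, uint64_t addr, uint64_t flags,
				 uint32_t count, uint32_t incr)
{
	buf[len++] = FAKE_OP_PTEPDE;
	buf[len++] = (uint32_t)pe;		/* dst addr, low */
	buf[len++] = (uint32_t)(pe >> 32);	/* dst addr, high */
	buf[len++] = (uint32_t)flags;		/* mask, low */
	buf[len++] = (uint32_t)(flags >> 32);	/* mask, high */
	buf[len++] = (uint32_t)addr;		/* first PTE value, low */
	buf[len++] = (uint32_t)(addr >> 32);	/* first PTE value, high */
	buf[len++] = incr;			/* increment per entry */
	buf[len++] = 0;				/* reserved */
	buf[len++] = count - 1;			/* number of entries, zero-based */
	return len;
}

int main(void)
{
	uint32_t buf[16];
	unsigned i, len = emit_set_pte_pde(buf, 0, 0x1000, 0x200000, ~0ULL, 8, 0x1000);

	for (i = 0; i < len; i++)
		printf("dw[%u] = 0x%08X\n", i, buf[i]);
	return 0;
}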
ib                 65 drivers/gpu/drm/amd/amdgpu/si_dma.c 				struct amdgpu_ib *ib,
ib                 75 drivers/gpu/drm/amd/amdgpu/si_dma.c 	amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
ib                 76 drivers/gpu/drm/amd/amdgpu/si_dma.c 	amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
ib                255 drivers/gpu/drm/amd/amdgpu/si_dma.c 	struct amdgpu_ib ib;
ib                269 drivers/gpu/drm/amd/amdgpu/si_dma.c 	memset(&ib, 0, sizeof(ib));
ib                270 drivers/gpu/drm/amd/amdgpu/si_dma.c 	r = amdgpu_ib_get(adev, NULL, 256, &ib);
ib                274 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, 1);
ib                275 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                276 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
ib                277 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib.ptr[3] = 0xDEADBEEF;
ib                278 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib.length_dw = 4;
ib                279 drivers/gpu/drm/amd/amdgpu/si_dma.c 	r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
ib                297 drivers/gpu/drm/amd/amdgpu/si_dma.c 	amdgpu_ib_free(adev, &ib, NULL);
ib                314 drivers/gpu/drm/amd/amdgpu/si_dma.c static void si_dma_vm_copy_pte(struct amdgpu_ib *ib,
ib                320 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
ib                322 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                323 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                324 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                325 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
ib                339 drivers/gpu/drm/amd/amdgpu/si_dma.c static void si_dma_vm_write_pte(struct amdgpu_ib *ib, uint64_t pe,
ib                345 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);
ib                346 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                347 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                349 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(value);
ib                350 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                367 drivers/gpu/drm/amd/amdgpu/si_dma.c static void si_dma_vm_set_pte_pde(struct amdgpu_ib *ib,
ib                386 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
ib                387 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = pe; /* dst addr */
ib                388 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                389 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(flags); /* mask */
ib                390 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(flags);
ib                391 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = value; /* value */
ib                392 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                393 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = incr; /* increment size */
ib                394 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = 0;
ib                407 drivers/gpu/drm/amd/amdgpu/si_dma.c static void si_dma_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
ib                409 drivers/gpu/drm/amd/amdgpu/si_dma.c 	while (ib->length_dw & 0x7)
ib                410 drivers/gpu/drm/amd/amdgpu/si_dma.c 		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0);
ib                775 drivers/gpu/drm/amd/amdgpu/si_dma.c static void si_dma_emit_copy_buffer(struct amdgpu_ib *ib,
ib                780 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
ib                782 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib                783 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);
ib                784 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset) & 0xff;
ib                785 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(src_offset) & 0xff;
ib                798 drivers/gpu/drm/amd/amdgpu/si_dma.c static void si_dma_emit_fill_buffer(struct amdgpu_ib *ib,
ib                803 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_CONSTANT_FILL,
ib                805 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);
ib                806 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = src_data;
ib                807 drivers/gpu/drm/amd/amdgpu/si_dma.c 	ib->ptr[ib->length_dw++] = upper_32_bits(dst_offset) << 16;
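The si_dma entries use DMA_PACKET() to fold the opcode and the dword count into a single header dword. The sketch below only illustrates that general shape (opcode in the top nibble, count in the low bits); the exact field positions and the flag bits in between are an assumption here and belong to the real macro in the headers:

#include <stdint.h>
#include <stdio.h>

/* illustrative packing only; not the real DMA_PACKET() bit layout */
#define FAKE_DMA_PACKET(cmd, n)	((((uint32_t)(cmd) & 0xF) << 28) | ((uint32_t)(n) & 0xFFFFF))

#define FAKE_DMA_WRITE	0x2
#define FAKE_DMA_NOP	0x0

int main(void)
{
	/* header for a one-dword write, as in the si_dma ib_test above */
	uint32_t hdr = FAKE_DMA_PACKET(FAKE_DMA_WRITE, 1);

	printf("header = 0x%08X (opcode %u, count %u)\n",
	       hdr, hdr >> 28, hdr & 0xFFFFF);
	return 0;
}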
ib                513 drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c 				  struct amdgpu_ib *ib,
ib                517 drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c 	amdgpu_ring_write(ring, ib->gpu_addr);
ib                519 drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                529 drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c 				  struct amdgpu_ib *ib,
ib                533 drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib                535 drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                537 drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                214 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	struct amdgpu_ib *ib;
ib                223 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib = &job->ibs[0];
ib                226 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->length_dw = 0;
ib                227 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000018;
ib                228 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* session info */
ib                229 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = handle;
ib                230 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00010000;
ib                231 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                232 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = addr;
ib                234 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000014;
ib                235 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000002; /* task info */
ib                236 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x0000001c;
ib                237 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000001;
ib                238 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                240 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000008;
ib                241 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x08000001; /* op initialize */
ib                243 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                244 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 		ib->ptr[i] = 0x0;
ib                277 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	struct amdgpu_ib *ib;
ib                286 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib = &job->ibs[0];
ib                289 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->length_dw = 0;
ib                290 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000018;
ib                291 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* session info */
ib                292 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = handle;
ib                293 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00010000;
ib                294 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                295 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = addr;
ib                297 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000014;
ib                298 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000002; /* task info */
ib                299 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x0000001c;
ib                300 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000001;
ib                301 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                303 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x00000008;
ib                304 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	ib->ptr[ib->length_dw++] = 0x08000002; /* op close session */
ib                306 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                307 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 		ib->ptr[i] = 0x0;
ib                993 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 				  struct amdgpu_ib *ib,
ib               1002 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1004 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1006 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1019 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 					struct amdgpu_ib *ib,
ib               1026 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1027 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1028 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c 	amdgpu_ring_write(ring, ib->length_dw);
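The uvd_v6_0 encode-ring test messages are reproduced almost completely by the lines above: a session-info block, a task-info block, an op code, and the remainder of the IB zero-filled. A user-space sketch of the same dword sequence (the constants are copied from the listing; the buffer size is an assumption for this sketch):

#include <stdint.h>
#include <stdio.h>

#define IB_SIZE_DW	64	/* assumed size; the kernel test allocates its own IB */

/* lay out the "create session" message shown in uvd_v6_0's encode test */
static unsigned build_enc_create_msg(uint32_t *ib, uint32_t handle, uint64_t addr)
{
	unsigned i, len = 0;

	ib[len++] = 0x00000018;			/* block length */
	ib[len++] = 0x00000001;			/* session info */
	ib[len++] = handle;
	ib[len++] = 0x00010000;
	ib[len++] = (uint32_t)(addr >> 32);
	ib[len++] = (uint32_t)addr;

	ib[len++] = 0x00000014;			/* block length */
	ib[len++] = 0x00000002;			/* task info */
	ib[len++] = 0x0000001c;
	ib[len++] = 0x00000001;
	ib[len++] = 0x00000000;

	ib[len++] = 0x00000008;			/* block length */
	ib[len++] = 0x08000001;			/* op initialize */

	for (i = len; i < IB_SIZE_DW; i++)	/* zero-fill the remainder */
		ib[i] = 0x0;
	return IB_SIZE_DW;
}

int main(void)
{
	uint32_t ib[IB_SIZE_DW];
	unsigned len = build_enc_create_msg(ib, 1, 0x100000000ULL);

	printf("message occupies %u dwords (13 used, rest zero)\n", len);
	return 0;
}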
ib                222 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	struct amdgpu_ib *ib;
ib                231 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib = &job->ibs[0];
ib                234 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->length_dw = 0;
ib                235 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000018;
ib                236 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000001; /* session info */
ib                237 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = handle;
ib                238 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                239 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                240 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = addr;
ib                242 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000014;
ib                243 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000002; /* task info */
ib                244 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x0000001c;
ib                245 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                246 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                248 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000008;
ib                249 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x08000001; /* op initialize */
ib                251 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                252 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 		ib->ptr[i] = 0x0;
ib                284 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	struct amdgpu_ib *ib;
ib                293 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib = &job->ibs[0];
ib                296 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->length_dw = 0;
ib                297 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000018;
ib                298 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000001;
ib                299 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = handle;
ib                300 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                301 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = upper_32_bits(addr);
ib                302 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = addr;
ib                304 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000014;
ib                305 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000002;
ib                306 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x0000001c;
ib                307 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                308 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000000;
ib                310 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x00000008;
ib                311 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	ib->ptr[ib->length_dw++] = 0x08000002; /* op close session */
ib                313 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	for (i = ib->length_dw; i < ib_size_dw; ++i)
ib                314 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 		ib->ptr[i] = 0x0;
ib               1264 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	struct amdgpu_ib *ib = &p->job->ibs[ib_idx];
ib               1271 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	for (i = 0; i < ib->length_dw; i += 2) {
ib               1292 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 				  struct amdgpu_ib *ib,
ib               1304 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1307 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1310 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1323 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 					struct amdgpu_ib *ib,
ib               1330 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1331 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1332 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                836 drivers/gpu/drm/amd/amdgpu/vce_v3_0.c 				  struct amdgpu_ib *ib,
ib                843 drivers/gpu/drm/amd/amdgpu/vce_v3_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib                844 drivers/gpu/drm/amd/amdgpu/vce_v3_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                845 drivers/gpu/drm/amd/amdgpu/vce_v3_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                952 drivers/gpu/drm/amd/amdgpu/vce_v4_0.c 					struct amdgpu_ib *ib, uint32_t flags)
ib                958 drivers/gpu/drm/amd/amdgpu/vce_v4_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib                959 drivers/gpu/drm/amd/amdgpu/vce_v4_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                960 drivers/gpu/drm/amd/amdgpu/vce_v4_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1518 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 					struct amdgpu_ib *ib,
ib               1530 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1533 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1536 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1678 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 					struct amdgpu_ib *ib,
ib               1685 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1686 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1687 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1883 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 					struct amdgpu_ib *ib,
ib               1899 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1903 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1907 drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1582 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			       struct amdgpu_ib *ib,
ib               1592 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1594 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1596 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1753 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 			       struct amdgpu_ib *ib,
ib               1760 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1761 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1762 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib               1938 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 				struct amdgpu_ib *ib,
ib               1953 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
ib               1957 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib               1961 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c 	amdgpu_ring_write(ring, ib->length_dw);
ib                 33 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h 				struct amdgpu_ib *ib, uint32_t flags);
ib                 45 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h 				struct amdgpu_ib *ib, uint32_t flags);
ib                 57 drivers/gpu/drm/amd/amdgpu/vcn_v2_0.h 				struct amdgpu_ib *ib, uint32_t flags);
ib                114 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 			uint64_t ib, size_t ib_size_in_dwords, bool chain)
ib                145 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 	packet->ordinal2 = lower_32_bits(ib);
ib                146 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 	packet->ib_base_hi = upper_32_bits(ib);
ib                110 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 			uint64_t ib, size_t ib_size_in_dwords, bool chain)
ib                141 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 	packet->ordinal2 = lower_32_bits(ib);
ib                142 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 	packet->ib_base_hi = upper_32_bits(ib);
ib                116 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 			uint64_t ib, size_t ib_size_in_dwords, bool chain)
ib                122 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 	if (WARN_ON(!ib))
ib                148 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 	packet->ordinal2 = lower_32_bits(ib);
ib                149 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 	packet->bitfields3.ib_base_hi = upper_32_bits(ib);
ib                948 drivers/gpu/drm/amd/amdkfd/kfd_priv.h 			uint64_t ib, size_t ib_size_in_dwords, bool chain);
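The KFD runlist packet writers above all split the 64-bit IB base address with lower_32_bits()/upper_32_bits() before storing it into two packet fields. A tiny sketch of that split, with a hypothetical packet struct standing in for the real PM4 runlist layout:

#include <stdint.h>
#include <stdio.h>

/* simplified equivalents of the kernel's lower_32_bits()/upper_32_bits() */
static inline uint32_t lo32(uint64_t v) { return (uint32_t)v; }
static inline uint32_t hi32(uint64_t v) { return (uint32_t)(v >> 32); }

struct fake_runlist_packet {	/* hypothetical; not the real pm4 packet layout */
	uint32_t header;
	uint32_t ib_base_lo;
	uint32_t ib_base_hi;
	uint32_t ib_size_dw;
};

int main(void)
{
	uint64_t ib = 0x0000000123456000ULL;
	struct fake_runlist_packet pkt = {
		.header     = 0,
		.ib_base_lo = lo32(ib),
		.ib_base_hi = hi32(ib),
		.ib_size_dw = 256,
	};

	printf("ib_base_lo=0x%08X ib_base_hi=0x%08X\n", pkt.ib_base_lo, pkt.ib_base_hi);
	return 0;
}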
ib                 28 drivers/gpu/drm/msm/disp/mdp5/mdp5_encoder.c 		.ib = (ib_val),				\
ib               3733 drivers/gpu/drm/radeon/cik.c void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib               3735 drivers/gpu/drm/radeon/cik.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib               3736 drivers/gpu/drm/radeon/cik.c 	unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
ib               3739 drivers/gpu/drm/radeon/cik.c 	if (ib->is_const_ib) {
ib               3765 drivers/gpu/drm/radeon/cik.c 	control |= ib->length_dw | (vm_id << 24);
ib               3768 drivers/gpu/drm/radeon/cik.c 	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFFC));
ib               3769 drivers/gpu/drm/radeon/cik.c 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib               3785 drivers/gpu/drm/radeon/cik.c 	struct radeon_ib ib;
ib               3797 drivers/gpu/drm/radeon/cik.c 	r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
ib               3803 drivers/gpu/drm/radeon/cik.c 	ib.ptr[0] = PACKET3(PACKET3_SET_UCONFIG_REG, 1);
ib               3804 drivers/gpu/drm/radeon/cik.c 	ib.ptr[1] = ((scratch - PACKET3_SET_UCONFIG_REG_START) >> 2);
ib               3805 drivers/gpu/drm/radeon/cik.c 	ib.ptr[2] = 0xDEADBEEF;
ib               3806 drivers/gpu/drm/radeon/cik.c 	ib.length_dw = 3;
ib               3807 drivers/gpu/drm/radeon/cik.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib               3810 drivers/gpu/drm/radeon/cik.c 		radeon_ib_free(rdev, &ib);
ib               3814 drivers/gpu/drm/radeon/cik.c 	r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
ib               3819 drivers/gpu/drm/radeon/cik.c 		radeon_ib_free(rdev, &ib);
ib               3824 drivers/gpu/drm/radeon/cik.c 		radeon_ib_free(rdev, &ib);
ib               3835 drivers/gpu/drm/radeon/cik.c 		DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
ib               3842 drivers/gpu/drm/radeon/cik.c 	radeon_ib_free(rdev, &ib);
ib               5605 drivers/gpu/drm/radeon/cik.c int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
ib                134 drivers/gpu/drm/radeon/cik_sdma.c 			      struct radeon_ib *ib)
ib                136 drivers/gpu/drm/radeon/cik_sdma.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                137 drivers/gpu/drm/radeon/cik_sdma.c 	u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;
ib                155 drivers/gpu/drm/radeon/cik_sdma.c 	radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
ib                156 drivers/gpu/drm/radeon/cik_sdma.c 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));
ib                157 drivers/gpu/drm/radeon/cik_sdma.c 	radeon_ring_write(ring, ib->length_dw);
ib                704 drivers/gpu/drm/radeon/cik_sdma.c 	struct radeon_ib ib;
ib                721 drivers/gpu/drm/radeon/cik_sdma.c 	r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
ib                727 drivers/gpu/drm/radeon/cik_sdma.c 	ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);
ib                728 drivers/gpu/drm/radeon/cik_sdma.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                729 drivers/gpu/drm/radeon/cik_sdma.c 	ib.ptr[2] = upper_32_bits(gpu_addr);
ib                730 drivers/gpu/drm/radeon/cik_sdma.c 	ib.ptr[3] = 1;
ib                731 drivers/gpu/drm/radeon/cik_sdma.c 	ib.ptr[4] = 0xDEADBEEF;
ib                732 drivers/gpu/drm/radeon/cik_sdma.c 	ib.length_dw = 5;
ib                734 drivers/gpu/drm/radeon/cik_sdma.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                736 drivers/gpu/drm/radeon/cik_sdma.c 		radeon_ib_free(rdev, &ib);
ib                740 drivers/gpu/drm/radeon/cik_sdma.c 	r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
ib                757 drivers/gpu/drm/radeon/cik_sdma.c 		DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
ib                762 drivers/gpu/drm/radeon/cik_sdma.c 	radeon_ib_free(rdev, &ib);
ib                804 drivers/gpu/drm/radeon/cik_sdma.c 			    struct radeon_ib *ib,
ib                813 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,
ib                815 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = bytes;
ib                816 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */
ib                817 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                818 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(src);
ib                819 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                820 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                842 drivers/gpu/drm/radeon/cik_sdma.c 			     struct radeon_ib *ib,
ib                856 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,
ib                858 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = pe;
ib                859 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                860 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = ndw;
ib                871 drivers/gpu/drm/radeon/cik_sdma.c 			ib->ptr[ib->length_dw++] = value;
ib                872 drivers/gpu/drm/radeon/cik_sdma.c 			ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                891 drivers/gpu/drm/radeon/cik_sdma.c 			   struct radeon_ib *ib,
ib                910 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_GENERATE_PTE_PDE, 0, 0);
ib                911 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = pe; /* dst addr */
ib                912 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe);
ib                913 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = flags; /* mask */
ib                914 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = 0;
ib                915 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = value; /* value */
ib                916 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                917 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = incr; /* increment size */
ib                918 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = 0;
ib                919 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = ndw; /* number of entries */
ib                933 drivers/gpu/drm/radeon/cik_sdma.c void cik_sdma_vm_pad_ib(struct radeon_ib *ib)
ib                935 drivers/gpu/drm/radeon/cik_sdma.c 	while (ib->length_dw & 0x7)
ib                936 drivers/gpu/drm/radeon/cik_sdma.c 		ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_NOP, 0, 0);
ib               2930 drivers/gpu/drm/radeon/evergreen.c void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib               2932 drivers/gpu/drm/radeon/evergreen.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib               2959 drivers/gpu/drm/radeon/evergreen.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               2960 drivers/gpu/drm/radeon/evergreen.c 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
ib               2961 drivers/gpu/drm/radeon/evergreen.c 	radeon_ring_write(ring, ib->length_dw);
ib                451 drivers/gpu/drm/radeon/evergreen_cs.c 			uint32_t *ib = p->ib.ptr;
ib                473 drivers/gpu/drm/radeon/evergreen_cs.c 						ib[track->cb_color_slice_idx[id]] = slice;
ib               1098 drivers/gpu/drm/radeon/evergreen_cs.c 	u32 tmp, *ib;
ib               1101 drivers/gpu/drm/radeon/evergreen_cs.c 	ib = p->ib.ptr;
ib               1149 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1178 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx] &= ~Z_ARRAY_MODE(0xf);
ib               1180 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
ib               1188 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
ib               1189 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] |= DB_TILE_SPLIT(tile_split) |
ib               1221 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1233 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1245 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1257 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1281 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1301 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1366 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
ib               1384 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
ib               1452 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] |= CB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
ib               1453 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] |= CB_TILE_SPLIT(tile_split) |
ib               1460 drivers/gpu/drm/radeon/evergreen_cs.c 		track->cb_color_attrib[tmp] = ib[idx];
ib               1480 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] |= CB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
ib               1481 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] |= CB_TILE_SPLIT(tile_split) |
ib               1488 drivers/gpu/drm/radeon/evergreen_cs.c 		track->cb_color_attrib[tmp] = ib[idx];
ib               1505 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1522 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1563 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1579 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1591 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1599 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] |= 3;
ib               1708 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1722 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1736 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1777 drivers/gpu/drm/radeon/evergreen_cs.c 	uint32_t *ib;
ib               1785 drivers/gpu/drm/radeon/evergreen_cs.c 	ib = p->ib.ptr;
ib               1823 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx + 0] = offset;
ib               1824 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx + 1] = (tmp & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               1869 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+0] = offset;
ib               1870 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = upper_32_bits(offset) & 0xff;
ib               1904 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+0] = offset;
ib               1905 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = upper_32_bits(offset) & 0xff;
ib               1932 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = offset;
ib               1933 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2025 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = reloc->gpu_offset;
ib               2026 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+2] = upper_32_bits(reloc->gpu_offset) & 0xff;
ib               2079 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+0] = idx_value + (u32)(reloc->gpu_offset & 0xffffffff);
ib               2105 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+1] = (ib[idx+1] & 0x3) | (offset & 0xfffffffc);
ib               2106 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2166 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx] = offset;
ib               2167 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] = (ib[idx+1] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               2204 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] = offset;
ib               2205 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+3] = upper_32_bits(offset) & 0xff;
ib               2232 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               2252 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+1] = offset & 0xfffffff8;
ib               2253 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2274 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = offset & 0xfffffffc;
ib               2275 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               2296 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = offset & 0xfffffffc;
ib               2297 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               2361 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1+(i*8)+1] |=
ib               2369 drivers/gpu/drm/radeon/evergreen_cs.c 						ib[idx+1+(i*8)+6] |= TEX_TILE_SPLIT(tile_split);
ib               2370 drivers/gpu/drm/radeon/evergreen_cs.c 						ib[idx+1+(i*8)+7] |=
ib               2381 drivers/gpu/drm/radeon/evergreen_cs.c 				tex_dim = ib[idx+1+(i*8)+0] & 0x7;
ib               2382 drivers/gpu/drm/radeon/evergreen_cs.c 				mip_address = ib[idx+1+(i*8)+3];
ib               2404 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1+(i*8)+2] += toffset;
ib               2405 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1+(i*8)+3] += moffset;
ib               2421 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1+(i*8)+1] = radeon_bo_size(reloc->robj) - offset;
ib               2425 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1+(i*8)+0] = offset64;
ib               2426 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1+(i*8)+2] = (ib[idx+1+(i*8)+2] & 0xffffff00) |
ib               2506 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+1] = offset;
ib               2507 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2525 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+3] = offset;
ib               2526 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+4] = upper_32_bits(offset) & 0xff;
ib               2554 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+0] = offset;
ib               2555 drivers/gpu/drm/radeon/evergreen_cs.c 		ib[idx+1] = upper_32_bits(offset) & 0xff;
ib               2579 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+1] = offset;
ib               2580 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2606 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+3] = offset;
ib               2607 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+4] = upper_32_bits(offset) & 0xff;
ib               2655 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+1] = (offset & 0xfffffffc) | swap;
ib               2656 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2778 drivers/gpu/drm/radeon/evergreen_cs.c 	for (r = 0; r < p->ib.length_dw; r++) {
ib               2779 drivers/gpu/drm/radeon/evergreen_cs.c 		pr_info("%05d  0x%08X\n", r, p->ib.ptr[r]);
ib               2802 drivers/gpu/drm/radeon/evergreen_cs.c 	uint32_t *ib = p->ib.ptr;
ib               2832 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               2840 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2841 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2883 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2884 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               2885 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2886 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2896 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
ib               2900 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2901 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2906 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               2907 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2911 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               2942 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xffffffff);
ib               2943 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xffffffff);
ib               2944 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2945 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2955 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(src_reloc->gpu_offset & 0xffffffff);
ib               2956 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2957 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+4] += (u32)(dst_reloc->gpu_offset & 0xffffffff);
ib               2958 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+5] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2991 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2992 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += (u32)(dst2_reloc->gpu_offset & 0xfffffffc);
ib               2993 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+3] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               2994 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+4] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2995 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+5] += upper_32_bits(dst2_reloc->gpu_offset) & 0xff;
ib               2996 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               3031 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               3032 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8);
ib               3033 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               3034 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               3047 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
ib               3049 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               3050 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               3053 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               3054 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               3056 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               3093 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               3094 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8);
ib               3095 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               3096 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               3107 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
ib               3111 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               3112 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               3117 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               3118 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               3122 drivers/gpu/drm/radeon/evergreen_cs.c 					ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               3143 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
ib               3144 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+4] += (u32)(dst_reloc->gpu_offset >> 8);
ib               3180 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               3181 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8);
ib               3182 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               3183 drivers/gpu/drm/radeon/evergreen_cs.c 				ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               3204 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               3205 drivers/gpu/drm/radeon/evergreen_cs.c 			ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) << 16) & 0x00ff0000;
ib               3217 drivers/gpu/drm/radeon/evergreen_cs.c 	for (r = 0; r < p->ib.length_dw; r++) {
ib               3218 drivers/gpu/drm/radeon/evergreen_cs.c 		pr_info("%05d  0x%08X\n", r, p->ib.ptr[r]);
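
The relocation patches above from evergreen_cs.c repeatedly split a 64-bit GPU offset into a dword-aligned low word plus a second dword that keeps only the upper 8 address bits, since the packets carry just 8 extra address bits. A minimal standalone sketch of that split; the helper names are illustrative and the driver open-codes this with upper_32_bits() and the masks visible above:

#include <stdint.h>
#include <stdio.h>

/* Illustrative helpers; the driver open-codes this with upper_32_bits()
 * and explicit masks, as in the relocation patches listed above. */
static uint32_t addr_lo(uint64_t a)  { return (uint32_t)(a & 0xffffffffULL); }
static uint32_t addr_hi8(uint64_t a) { return (uint32_t)(a >> 32) & 0xff; }

int main(void)
{
	uint64_t gpu_offset = 0x000000ab00041000ULL;	/* hypothetical GPU VA */

	/* low dword stays dword-aligned, high dword carries bits 39:32 only */
	uint32_t dw_lo = addr_lo(gpu_offset) & 0xfffffffc;
	uint32_t dw_hi = addr_hi8(gpu_offset);

	printf("ib low dword 0x%08x, high dword 0x%08x\n", dw_lo, dw_hi);
	return 0;
}
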
ib               3349 drivers/gpu/drm/radeon/evergreen_cs.c 				      u32 *ib, struct radeon_cs_packet *pkt)
ib               3352 drivers/gpu/drm/radeon/evergreen_cs.c 	u32 idx_value = ib[idx];
ib               3409 drivers/gpu/drm/radeon/evergreen_cs.c 			reg = ib[idx + 5] * 4;
ib               3416 drivers/gpu/drm/radeon/evergreen_cs.c 			reg = ib[idx + 3] * 4;
ib               3437 drivers/gpu/drm/radeon/evergreen_cs.c 		command = ib[idx + 4];
ib               3438 drivers/gpu/drm/radeon/evergreen_cs.c 		info = ib[idx + 1];
ib               3475 drivers/gpu/drm/radeon/evergreen_cs.c 				start_reg = ib[idx + 2];
ib               3521 drivers/gpu/drm/radeon/evergreen_cs.c int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
ib               3529 drivers/gpu/drm/radeon/evergreen_cs.c 		pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
ib               3530 drivers/gpu/drm/radeon/evergreen_cs.c 		pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
ib               3541 drivers/gpu/drm/radeon/evergreen_cs.c 			pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
ib               3542 drivers/gpu/drm/radeon/evergreen_cs.c 			ret = evergreen_vm_packet3_check(rdev, ib->ptr, &pkt);
ib               3552 drivers/gpu/drm/radeon/evergreen_cs.c 	} while (idx < ib->length_dw);
ib               3566 drivers/gpu/drm/radeon/evergreen_cs.c int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
ib               3572 drivers/gpu/drm/radeon/evergreen_cs.c 		header = ib->ptr[idx];
ib               3589 drivers/gpu/drm/radeon/evergreen_cs.c 				DRM_ERROR("bad DMA_PACKET_WRITE [%6d] 0x%08x sub cmd is not 0 or 8\n", idx, ib->ptr[idx]);
ib               3640 drivers/gpu/drm/radeon/evergreen_cs.c 				DRM_ERROR("bad DMA_PACKET_COPY [%6d] 0x%08x invalid sub cmd\n", idx, ib->ptr[idx]);
ib               3654 drivers/gpu/drm/radeon/evergreen_cs.c 	} while (idx < ib->length_dw);
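
evergreen_ib_parse() and evergreen_dma_ib_parse() above walk the submitted IB dword by dword, decoding each packet header for its type and payload count and rejecting anything unexpected. A small standalone sketch of such a walk; the driver uses the RADEON_CP_PACKET_GET_* macros shown above, and the bit positions in the decode macros below follow the common PM4 layout and are an assumption here, not copied from the driver headers:

#include <stdint.h>
#include <stdio.h>

/* Assumed PM4-style header fields: type in bits 31:30, count in 29:16. */
#define PKT_TYPE(h)  (((h) >> 30) & 0x3)
#define PKT_COUNT(h) (((h) >> 16) & 0x3fff)

static int walk_ib(const uint32_t *ptr, unsigned length_dw)
{
	unsigned idx = 0;

	do {
		uint32_t header = ptr[idx];

		switch (PKT_TYPE(header)) {
		case 0:	/* type-0: header + count+1 register writes */
		case 3:	/* type-3: header + count+1 payload dwords  */
			idx += PKT_COUNT(header) + 2;
			break;
		case 2:	/* type-2: single filler dword              */
			idx += 1;
			break;
		default:
			return -1;	/* type-1 / unknown: reject  */
		}
	} while (idx < length_dw);

	return 0;
}

int main(void)
{
	uint32_t ib[2] = { 0x80000000u, 0x80000000u }; /* two type-2 fillers */

	printf("walk result: %d\n", walk_ib(ib, 2));
	return 0;
}
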
ib                 68 drivers/gpu/drm/radeon/evergreen_dma.c 				   struct radeon_ib *ib)
ib                 70 drivers/gpu/drm/radeon/evergreen_dma.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                 89 drivers/gpu/drm/radeon/evergreen_dma.c 	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
ib                 90 drivers/gpu/drm/radeon/evergreen_dma.c 	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
ib               1425 drivers/gpu/drm/radeon/ni.c void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib               1427 drivers/gpu/drm/radeon/ni.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib               1428 drivers/gpu/drm/radeon/ni.c 	unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
ib               1449 drivers/gpu/drm/radeon/ni.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               1450 drivers/gpu/drm/radeon/ni.c 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
ib               1451 drivers/gpu/drm/radeon/ni.c 	radeon_ring_write(ring, ib->length_dw | (vm_id << 24));
ib                123 drivers/gpu/drm/radeon/ni_dma.c 				struct radeon_ib *ib)
ib                125 drivers/gpu/drm/radeon/ni_dma.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                126 drivers/gpu/drm/radeon/ni_dma.c 	unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
ib                145 drivers/gpu/drm/radeon/ni_dma.c 	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
ib                146 drivers/gpu/drm/radeon/ni_dma.c 	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
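
The *_ring_ib_execute() hooks above do not copy the IB into the ring; they only write a short indirect-buffer command carrying the IB's aligned GPU address, its upper address bits, and its length in dwords (plus the VM id on the CP ring). A standalone sketch of the two dword encodings visible above, with purely illustrative values:

#include <stdint.h>
#include <stdio.h>

static uint32_t upper32(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t ib_gpu_addr = 0x0000002300041000ULL;	/* hypothetical IB address */
	uint32_t length_dw = 64;
	uint32_t vm_id = 3;

	/* DMA ring form (evergreen_dma/ni_dma above): 32-byte-aligned base,
	 * length packed into the high bits of the second dword. */
	uint32_t dma_dw1 = (uint32_t)(ib_gpu_addr & 0xFFFFFFE0);
	uint32_t dma_dw2 = (length_dw << 12) | (upper32(ib_gpu_addr) & 0xFF);

	/* CP ring form (cayman above): dword-aligned base, upper bits,
	 * then length with the VM id in bits 31:24. */
	uint32_t cp_dw1 = (uint32_t)(ib_gpu_addr & 0xFFFFFFFC);
	uint32_t cp_dw2 = upper32(ib_gpu_addr) & 0xFF;
	uint32_t cp_dw3 = length_dw | (vm_id << 24);

	printf("DMA: %08x %08x\nCP:  %08x %08x %08x\n",
	       dma_dw1, dma_dw2, cp_dw1, cp_dw2, cp_dw3);
	return 0;
}
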
ib                316 drivers/gpu/drm/radeon/ni_dma.c 			      struct radeon_ib *ib,
ib                327 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
ib                329 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                330 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                331 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                332 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
ib                354 drivers/gpu/drm/radeon/ni_dma.c 			       struct radeon_ib *ib,
ib                368 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE,
ib                370 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = pe;
ib                371 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                382 drivers/gpu/drm/radeon/ni_dma.c 			ib->ptr[ib->length_dw++] = value;
ib                383 drivers/gpu/drm/radeon/ni_dma.c 			ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                402 drivers/gpu/drm/radeon/ni_dma.c 			     struct radeon_ib *ib,
ib                421 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
ib                422 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = pe; /* dst addr */
ib                423 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                424 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = flags; /* mask */
ib                425 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = 0;
ib                426 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = value; /* value */
ib                427 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                428 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = incr; /* increment size */
ib                429 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = 0;
ib                443 drivers/gpu/drm/radeon/ni_dma.c void cayman_dma_vm_pad_ib(struct radeon_ib *ib)
ib                445 drivers/gpu/drm/radeon/ni_dma.c 	while (ib->length_dw & 0x7)
ib                446 drivers/gpu/drm/radeon/ni_dma.c 		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0);
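
cayman_dma_vm_pad_ib() above rounds the IB up to a multiple of 8 dwords by appending NOP packets, which the DMA rings require. A standalone sketch of that padding loop; the NOP value is a placeholder, the driver builds the real one with DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0):

#include <stdint.h>
#include <stdio.h>

#define FAKE_DMA_NOP 0x00000000u	/* placeholder, not the real encoding */

struct fake_ib {
	uint32_t ptr[64];
	unsigned length_dw;
};

/* Pad the command stream to an 8-dword boundary, mirroring the
 * `while (ib->length_dw & 0x7)` loop listed above. */
static void pad_ib(struct fake_ib *ib)
{
	while (ib->length_dw & 0x7)
		ib->ptr[ib->length_dw++] = FAKE_DMA_NOP;
}

int main(void)
{
	struct fake_ib ib = { .length_dw = 13 };

	pad_ib(&ib);
	printf("padded length: %u dwords\n", ib.length_dw);	/* prints 16 */
	return 0;
}
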
ib               1295 drivers/gpu/drm/radeon/r100.c 		p->ib.ptr[idx] = (value & 0x3fc00000) | tmp;
ib               1297 drivers/gpu/drm/radeon/r100.c 		p->ib.ptr[idx] = (value & 0xffc00000) | tmp;
ib               1309 drivers/gpu/drm/radeon/r100.c 	volatile uint32_t *ib;
ib               1312 drivers/gpu/drm/radeon/r100.c 	ib = p->ib.ptr;
ib               1331 drivers/gpu/drm/radeon/r100.c 		ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset);
ib               1343 drivers/gpu/drm/radeon/r100.c 		ib[idx+2] = radeon_get_ib_value(p, idx + 2) + ((u32)reloc->gpu_offset);
ib               1357 drivers/gpu/drm/radeon/r100.c 		ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset);
ib               1432 drivers/gpu/drm/radeon/r100.c 	volatile uint32_t *ib;
ib               1434 drivers/gpu/drm/radeon/r100.c 	ib = p->ib.ptr;
ib               1475 drivers/gpu/drm/radeon/r100.c 		ib[h_idx + 2] = PACKET2(0);
ib               1476 drivers/gpu/drm/radeon/r100.c 		ib[h_idx + 3] = PACKET2(0);
ib               1491 drivers/gpu/drm/radeon/r100.c 		ib[h_idx] = header;
ib               1492 drivers/gpu/drm/radeon/r100.c 		ib[h_idx + 3] |= RADEON_ENG_DISPLAY_SELECT_CRTC1;
ib               1557 drivers/gpu/drm/radeon/r100.c 	volatile uint32_t *ib;
ib               1564 drivers/gpu/drm/radeon/r100.c 	ib = p->ib.ptr;
ib               1598 drivers/gpu/drm/radeon/r100.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1611 drivers/gpu/drm/radeon/r100.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1632 drivers/gpu/drm/radeon/r100.c 			ib[idx] = tmp + ((u32)reloc->gpu_offset);
ib               1634 drivers/gpu/drm/radeon/r100.c 			ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1652 drivers/gpu/drm/radeon/r100.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1670 drivers/gpu/drm/radeon/r100.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1688 drivers/gpu/drm/radeon/r100.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1713 drivers/gpu/drm/radeon/r100.c 			ib[idx] = tmp;
ib               1715 drivers/gpu/drm/radeon/r100.c 			ib[idx] = idx_value;
ib               1776 drivers/gpu/drm/radeon/r100.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1916 drivers/gpu/drm/radeon/r100.c 	volatile uint32_t *ib;
ib               1919 drivers/gpu/drm/radeon/r100.c 	ib = p->ib.ptr;
ib               1935 drivers/gpu/drm/radeon/r100.c 		ib[idx+1] = radeon_get_ib_value(p, idx+1) + ((u32)reloc->gpu_offset);
ib               1949 drivers/gpu/drm/radeon/r100.c 		ib[idx] = radeon_get_ib_value(p, idx) + ((u32)reloc->gpu_offset);
ib               3691 drivers/gpu/drm/radeon/r100.c void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib               3702 drivers/gpu/drm/radeon/r100.c 	radeon_ring_write(ring, ib->gpu_addr);
ib               3703 drivers/gpu/drm/radeon/r100.c 	radeon_ring_write(ring, ib->length_dw);
ib               3708 drivers/gpu/drm/radeon/r100.c 	struct radeon_ib ib;
ib               3720 drivers/gpu/drm/radeon/r100.c 	r = radeon_ib_get(rdev, RADEON_RING_TYPE_GFX_INDEX, &ib, NULL, 256);
ib               3725 drivers/gpu/drm/radeon/r100.c 	ib.ptr[0] = PACKET0(scratch, 0);
ib               3726 drivers/gpu/drm/radeon/r100.c 	ib.ptr[1] = 0xDEADBEEF;
ib               3727 drivers/gpu/drm/radeon/r100.c 	ib.ptr[2] = PACKET2(0);
ib               3728 drivers/gpu/drm/radeon/r100.c 	ib.ptr[3] = PACKET2(0);
ib               3729 drivers/gpu/drm/radeon/r100.c 	ib.ptr[4] = PACKET2(0);
ib               3730 drivers/gpu/drm/radeon/r100.c 	ib.ptr[5] = PACKET2(0);
ib               3731 drivers/gpu/drm/radeon/r100.c 	ib.ptr[6] = PACKET2(0);
ib               3732 drivers/gpu/drm/radeon/r100.c 	ib.ptr[7] = PACKET2(0);
ib               3733 drivers/gpu/drm/radeon/r100.c 	ib.length_dw = 8;
ib               3734 drivers/gpu/drm/radeon/r100.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib               3739 drivers/gpu/drm/radeon/r100.c 	r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
ib               3765 drivers/gpu/drm/radeon/r100.c 	radeon_ib_free(rdev, &ib);
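
The r100 lines above implement the classic scratch-register IB test: allocate a small IB, emit a single register write of a magic value, pad the rest with filler packets, schedule it, and wait on the resulting fence before reading the register back. A condensed sketch of that flow, not the driver's function verbatim: radeon_scratch_get()/radeon_scratch_free() are assumed helpers, and error reporting plus the read-back poll loop are trimmed:

/* Condensed sketch of the scratch-register IB test pattern above. */
static int ib_test_sketch(struct radeon_device *rdev)
{
	struct radeon_ib ib;
	uint32_t scratch;
	unsigned i;
	int r;

	r = radeon_scratch_get(rdev, &scratch);		/* assumed helper */
	if (r)
		return r;

	r = radeon_ib_get(rdev, RADEON_RING_TYPE_GFX_INDEX, &ib, NULL, 256);
	if (r)
		goto free_scratch;

	ib.ptr[0] = PACKET0(scratch, 0);	/* write one register ...        */
	ib.ptr[1] = 0xDEADBEEF;			/* ... with a recognizable value */
	for (i = 2; i < 8; i++)
		ib.ptr[i] = PACKET2(0);		/* pad with filler packets       */
	ib.length_dw = 8;

	r = radeon_ib_schedule(rdev, &ib, NULL, false);
	if (!r)
		r = radeon_fence_wait_timeout(ib.fence, false,
					      usecs_to_jiffies(1000000));
	/* ...poll the scratch register until it reads back 0xDEADBEEF... */

	radeon_ib_free(rdev, &ib);
free_scratch:
	radeon_scratch_free(rdev, scratch);		/* assumed helper */
	return r < 0 ? r : 0;
}
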
ib                151 drivers/gpu/drm/radeon/r200.c 	volatile uint32_t *ib;
ib                159 drivers/gpu/drm/radeon/r200.c 	ib = p->ib.ptr;
ib                191 drivers/gpu/drm/radeon/r200.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                204 drivers/gpu/drm/radeon/r200.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                228 drivers/gpu/drm/radeon/r200.c 			ib[idx] = tmp + ((u32)reloc->gpu_offset);
ib                230 drivers/gpu/drm/radeon/r200.c 			ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                274 drivers/gpu/drm/radeon/r200.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                300 drivers/gpu/drm/radeon/r200.c 			ib[idx] = tmp;
ib                302 drivers/gpu/drm/radeon/r200.c 			ib[idx] = idx_value;
ib                368 drivers/gpu/drm/radeon/r200.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                637 drivers/gpu/drm/radeon/r300.c 	volatile uint32_t *ib;
ib                643 drivers/gpu/drm/radeon/r300.c 	ib = p->ib.ptr;
ib                679 drivers/gpu/drm/radeon/r300.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                692 drivers/gpu/drm/radeon/r300.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib                720 drivers/gpu/drm/radeon/r300.c 			ib[idx] = (idx_value & 31) | /* keep the 1st 5 bits */
ib                732 drivers/gpu/drm/radeon/r300.c 			ib[idx] = tmp;
ib                801 drivers/gpu/drm/radeon/r300.c 			ib[idx] = tmp;
ib                886 drivers/gpu/drm/radeon/r300.c 			ib[idx] = tmp;
ib               1091 drivers/gpu/drm/radeon/r300.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1104 drivers/gpu/drm/radeon/r300.c 				ib[idx] = idx_value & ~1;
ib               1136 drivers/gpu/drm/radeon/r300.c 		ib[idx] = idx_value + ((u32)reloc->gpu_offset);
ib               1181 drivers/gpu/drm/radeon/r300.c 	volatile uint32_t *ib;
ib               1185 drivers/gpu/drm/radeon/r300.c 	ib = p->ib.ptr;
ib               1201 drivers/gpu/drm/radeon/r300.c 		ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset);
ib               3369 drivers/gpu/drm/radeon/r600.c void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib               3371 drivers/gpu/drm/radeon/r600.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib               3394 drivers/gpu/drm/radeon/r600.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               3395 drivers/gpu/drm/radeon/r600.c 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
ib               3396 drivers/gpu/drm/radeon/r600.c 	radeon_ring_write(ring, ib->length_dw);
ib               3401 drivers/gpu/drm/radeon/r600.c 	struct radeon_ib ib;
ib               3413 drivers/gpu/drm/radeon/r600.c 	r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
ib               3418 drivers/gpu/drm/radeon/r600.c 	ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
ib               3419 drivers/gpu/drm/radeon/r600.c 	ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
ib               3420 drivers/gpu/drm/radeon/r600.c 	ib.ptr[2] = 0xDEADBEEF;
ib               3421 drivers/gpu/drm/radeon/r600.c 	ib.length_dw = 3;
ib               3422 drivers/gpu/drm/radeon/r600.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib               3427 drivers/gpu/drm/radeon/r600.c 	r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
ib               3445 drivers/gpu/drm/radeon/r600.c 		DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
ib               3452 drivers/gpu/drm/radeon/r600.c 	radeon_ib_free(rdev, &ib);
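
The r600 variant above uses a PACKET3_SET_CONFIG_REG packet instead of a type-0 write; its payload addresses the register as a dword index relative to the config-register window, which is what the subtract-and-shift on the scratch address computes. A tiny standalone illustration; the window base value is an assumption standing in for PACKET3_SET_CONFIG_REG_OFFSET:

#include <stdint.h>
#include <stdio.h>

#define CONFIG_REG_WINDOW_BASE 0x00008000u	/* assumed window base */

int main(void)
{
	uint32_t scratch_reg = 0x8500;	/* hypothetical register byte address */

	/* SET_CONFIG_REG carries a dword index relative to the window start,
	 * hence the subtract-and-shift seen in the r600 IB test above. */
	uint32_t reg_index = (scratch_reg - CONFIG_REG_WINDOW_BASE) >> 2;

	printf("register dword index: %u\n", reg_index);
	return 0;
}
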
ib                357 drivers/gpu/drm/radeon/r600_cs.c 	volatile u32 *ib = p->ib.ptr;
ib                468 drivers/gpu/drm/radeon/r600_cs.c 	ib[track->cb_color_size_idx[i]] = tmp;
ib                527 drivers/gpu/drm/radeon/r600_cs.c 	volatile u32 *ib = p->ib.ptr;
ib                565 drivers/gpu/drm/radeon/r600_cs.c 		ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);
ib                835 drivers/gpu/drm/radeon/r600_cs.c 	volatile uint32_t *ib;
ib                837 drivers/gpu/drm/radeon/r600_cs.c 	ib = p->ib.ptr;
ib                900 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 2] = PACKET2(0);
ib                901 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 3] = PACKET2(0);
ib                902 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 4] = PACKET2(0);
ib                903 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 5] = PACKET2(0);
ib                904 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 6] = PACKET2(0);
ib                905 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 7] = PACKET2(0);
ib                906 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 8] = PACKET2(0);
ib                910 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx] = header;
ib                911 drivers/gpu/drm/radeon/r600_cs.c 		ib[h_idx + 4] = vline_status[crtc_id] >> 2;
ib                973 drivers/gpu/drm/radeon/r600_cs.c 	u32 m, i, tmp, *ib;
ib                984 drivers/gpu/drm/radeon/r600_cs.c 	ib = p->ib.ptr;
ib               1023 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1042 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx] &= C_028010_ARRAY_MODE;
ib               1045 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx] |= S_028010_ARRAY_MODE(V_028010_ARRAY_2D_TILED_THIN1);
ib               1048 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx] |= S_028010_ARRAY_MODE(V_028010_ARRAY_1D_TILED_THIN1);
ib               1085 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1106 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1144 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx] |= S_0280A0_ARRAY_MODE(V_0280A0_ARRAY_2D_TILED_THIN1);
ib               1147 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx] |= S_0280A0_ARRAY_MODE(V_0280A0_ARRAY_1D_TILED_THIN1);
ib               1206 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx] = track->cb_color_base_last[tmp];
ib               1214 drivers/gpu/drm/radeon/r600_cs.c 			track->cb_color_frag_offset[tmp] = (u64)ib[idx] << 8;
ib               1215 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1237 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx] = track->cb_color_base_last[tmp];
ib               1245 drivers/gpu/drm/radeon/r600_cs.c 			track->cb_color_tile_offset[tmp] = (u64)ib[idx] << 8;
ib               1246 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1282 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1283 drivers/gpu/drm/radeon/r600_cs.c 		track->cb_color_base_last[tmp] = ib[idx];
ib               1296 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1309 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1316 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] |= 3;
ib               1378 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1387 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1631 drivers/gpu/drm/radeon/r600_cs.c 	volatile u32 *ib;
ib               1639 drivers/gpu/drm/radeon/r600_cs.c 	ib = p->ib.ptr;
ib               1677 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx + 0] = offset;
ib               1678 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx + 1] = (tmp & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               1718 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx+0] = offset;
ib               1719 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx+1] = upper_32_bits(offset) & 0xff;
ib               1770 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] = (ib[idx+1] & 0x3) | (offset & 0xfffffff0);
ib               1771 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               1814 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx] = offset;
ib               1815 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] = (ib[idx+1] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               1844 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+2] = offset;
ib               1845 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+3] = upper_32_bits(offset) & 0xff;
ib               1862 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               1882 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] = offset & 0xfffffff8;
ib               1883 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               1904 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx+1] = offset & 0xfffffffc;
ib               1905 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
ib               1968 drivers/gpu/drm/radeon/r600_cs.c 						ib[idx+1+(i*7)+0] |= S_038000_TILE_MODE(V_038000_ARRAY_2D_TILED_THIN1);
ib               1970 drivers/gpu/drm/radeon/r600_cs.c 						ib[idx+1+(i*7)+0] |= S_038000_TILE_MODE(V_038000_ARRAY_1D_TILED_THIN1);
ib               1988 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+1+(i*7)+2] += base_offset;
ib               1989 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+1+(i*7)+3] += mip_offset;
ib               2006 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+1+(i*7)+1] = radeon_bo_size(reloc->robj) - offset;
ib               2010 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+1+(i*8)+0] = offset64;
ib               2011 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+1+(i*8)+2] = (ib[idx+1+(i*8)+2] & 0xffffff00) |
ib               2119 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
ib               2153 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] = offset;
ib               2154 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2172 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+3] = offset;
ib               2173 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+4] = upper_32_bits(offset) & 0xff;
ib               2201 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx+0] = offset;
ib               2202 drivers/gpu/drm/radeon/r600_cs.c 		ib[idx+1] = upper_32_bits(offset) & 0xff;
ib               2226 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] = offset;
ib               2227 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+2] = upper_32_bits(offset) & 0xff;
ib               2250 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+3] = offset;
ib               2251 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+4] = upper_32_bits(offset) & 0xff;
ib               2321 drivers/gpu/drm/radeon/r600_cs.c 	for (r = 0; r < p->ib.length_dw; r++) {
ib               2322 drivers/gpu/drm/radeon/r600_cs.c 		pr_info("%05d  0x%08X\n", r, p->ib.ptr[r]);
ib               2383 drivers/gpu/drm/radeon/r600_cs.c 	volatile u32 *ib = p->ib.ptr;
ib               2411 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               2417 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2418 drivers/gpu/drm/radeon/r600_cs.c 				ib[idx+2] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2445 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
ib               2449 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+5] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2450 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+6] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2455 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+5] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               2456 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2460 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
ib               2470 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2471 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               2472 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
ib               2473 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2481 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2482 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
ib               2483 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+3] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
ib               2484 drivers/gpu/drm/radeon/r600_cs.c 					ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) & 0xff) << 16;
ib               2516 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
ib               2517 drivers/gpu/drm/radeon/r600_cs.c 			ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) << 16) & 0x00ff0000;
ib               2529 drivers/gpu/drm/radeon/r600_cs.c 	for (r = 0; r < p->ib.length_dw; r++) {
ib               2530 drivers/gpu/drm/radeon/r600_cs.c 		pr_info("%05d  0x%08X\n", r, p->ib.ptr[r]);
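
Several of the r600 DMA relocation fix-ups above pack the upper address bits of both buffers into one dword: source bits in 7:0 and destination bits in 23:16. A standalone sketch of that packing with made-up addresses:

#include <stdint.h>
#include <stdio.h>

static uint32_t upper32(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint64_t src = 0x0000001200003000ULL;	/* hypothetical source VA      */
	uint64_t dst = 0x0000003400007000ULL;	/* hypothetical destination VA */

	/* Single-dword form used by some r600 DMA packets: source high bits
	 * in 7:0, destination high bits in 23:16, mirroring the two += lines
	 * listed above. */
	uint32_t dw = 0;
	dw += upper32(src) & 0xff;
	dw += (upper32(dst) & 0xff) << 16;

	printf("combined high dword: 0x%08x\n", dw);
	return 0;
}
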
ib                339 drivers/gpu/drm/radeon/r600_dma.c 	struct radeon_ib ib;
ib                353 drivers/gpu/drm/radeon/r600_dma.c 	r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
ib                359 drivers/gpu/drm/radeon/r600_dma.c 	ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
ib                360 drivers/gpu/drm/radeon/r600_dma.c 	ib.ptr[1] = lower_32_bits(gpu_addr);
ib                361 drivers/gpu/drm/radeon/r600_dma.c 	ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
ib                362 drivers/gpu/drm/radeon/r600_dma.c 	ib.ptr[3] = 0xDEADBEEF;
ib                363 drivers/gpu/drm/radeon/r600_dma.c 	ib.length_dw = 4;
ib                365 drivers/gpu/drm/radeon/r600_dma.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                367 drivers/gpu/drm/radeon/r600_dma.c 		radeon_ib_free(rdev, &ib);
ib                371 drivers/gpu/drm/radeon/r600_dma.c 	r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
ib                388 drivers/gpu/drm/radeon/r600_dma.c 		DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
ib                393 drivers/gpu/drm/radeon/r600_dma.c 	radeon_ib_free(rdev, &ib);
ib                405 drivers/gpu/drm/radeon/r600_dma.c void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib                407 drivers/gpu/drm/radeon/r600_dma.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                426 drivers/gpu/drm/radeon/r600_dma.c 	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
ib                427 drivers/gpu/drm/radeon/r600_dma.c 	radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF));
ib               1006 drivers/gpu/drm/radeon/radeon.h 		  struct radeon_ib *ib, struct radeon_vm *vm,
ib               1008 drivers/gpu/drm/radeon/radeon.h void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib);
ib               1009 drivers/gpu/drm/radeon/radeon.h int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib,
ib               1078 drivers/gpu/drm/radeon/radeon.h 	struct radeon_ib	ib;
ib               1095 drivers/gpu/drm/radeon/radeon.h 	return p->ib.ptr[idx];
ib               1739 drivers/gpu/drm/radeon/radeon.h void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib               1817 drivers/gpu/drm/radeon/radeon.h 	int (*ib_parse)(struct radeon_device *rdev, struct radeon_ib *ib);
ib               1821 drivers/gpu/drm/radeon/radeon.h 	void (*ib_execute)(struct radeon_device *rdev, struct radeon_ib *ib);
ib               1871 drivers/gpu/drm/radeon/radeon.h 				   struct radeon_ib *ib,
ib               1875 drivers/gpu/drm/radeon/radeon.h 				    struct radeon_ib *ib,
ib               1880 drivers/gpu/drm/radeon/radeon.h 				  struct radeon_ib *ib,
ib               1884 drivers/gpu/drm/radeon/radeon.h 		void (*pad_ib)(struct radeon_ib *ib);
ib               2711 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_copy_pages(rdev, ib, pe, src, count) ((rdev)->asic->vm.copy_pages((rdev), (ib), (pe), (src), (count)))
ib               2712 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_write_pages(rdev, ib, pe, addr, count, incr, flags) ((rdev)->asic->vm.write_pages((rdev), (ib), (pe), (addr), (count), (incr), (flags)))
ib               2713 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_set_pages(rdev, ib, pe, addr, count, incr, flags) ((rdev)->asic->vm.set_pages((rdev), (ib), (pe), (addr), (count), (incr), (flags)))
ib               2714 drivers/gpu/drm/radeon/radeon.h #define radeon_asic_vm_pad_ib(rdev, ib) ((rdev)->asic->vm.pad_ib((ib)))
ib               2718 drivers/gpu/drm/radeon/radeon.h #define radeon_ring_ib_execute(rdev, r, ib) (rdev)->asic->ring[(r)]->ib_execute((rdev), (ib))
ib               2719 drivers/gpu/drm/radeon/radeon.h #define radeon_ring_ib_parse(rdev, r, ib) (rdev)->asic->ring[(r)]->ib_parse((rdev), (ib))
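
The radeon_asic_vm_* and radeon_ring_ib_* macros above are thin wrappers that forward to per-chip function pointers stored in rdev->asic. A toy standalone illustration of that dispatch pattern; every name below is invented for the example:

#include <stdio.h>

struct toy_ib { unsigned length_dw; };
struct toy_ring_ops { void (*ib_execute)(struct toy_ib *ib); };
struct toy_asic { const struct toy_ring_ops *ring; };

static void toy_execute(struct toy_ib *ib)
{
	printf("executing IB of %u dwords\n", ib->length_dw);
}

static const struct toy_ring_ops toy_ops = { .ib_execute = toy_execute };

int main(void)
{
	struct toy_asic asic = { .ring = &toy_ops };
	struct toy_ib ib = { .length_dw = 16 };

	/* what a macro like radeon_ring_ib_execute() expands to */
	asic.ring->ib_execute(&ib);
	return 0;
}
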
ib                 95 drivers/gpu/drm/radeon/radeon_asic.h void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                334 drivers/gpu/drm/radeon/radeon_asic.h void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                344 drivers/gpu/drm/radeon/radeon_asic.h void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                519 drivers/gpu/drm/radeon/radeon_asic.h void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                546 drivers/gpu/drm/radeon/radeon_asic.h 				   struct radeon_ib *ib);
ib                611 drivers/gpu/drm/radeon/radeon_asic.h void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                617 drivers/gpu/drm/radeon/radeon_asic.h int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
ib                618 drivers/gpu/drm/radeon/radeon_asic.h int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
ib                620 drivers/gpu/drm/radeon/radeon_asic.h 				struct radeon_ib *ib);
ib                625 drivers/gpu/drm/radeon/radeon_asic.h 			      struct radeon_ib *ib,
ib                629 drivers/gpu/drm/radeon/radeon_asic.h 			       struct radeon_ib *ib,
ib                634 drivers/gpu/drm/radeon/radeon_asic.h 			     struct radeon_ib *ib,
ib                638 drivers/gpu/drm/radeon/radeon_asic.h void cayman_dma_vm_pad_ib(struct radeon_ib *ib);
ib                717 drivers/gpu/drm/radeon/radeon_asic.h void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                724 drivers/gpu/drm/radeon/radeon_asic.h int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
ib                731 drivers/gpu/drm/radeon/radeon_asic.h 			  struct radeon_ib *ib,
ib                735 drivers/gpu/drm/radeon/radeon_asic.h 			   struct radeon_ib *ib,
ib                740 drivers/gpu/drm/radeon/radeon_asic.h 			 struct radeon_ib *ib,
ib                795 drivers/gpu/drm/radeon/radeon_asic.h void cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                822 drivers/gpu/drm/radeon/radeon_asic.h void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                833 drivers/gpu/drm/radeon/radeon_asic.h 			    struct radeon_ib *ib,
ib                837 drivers/gpu/drm/radeon/radeon_asic.h 			     struct radeon_ib *ib,
ib                842 drivers/gpu/drm/radeon/radeon_asic.h 			   struct radeon_ib *ib,
ib                846 drivers/gpu/drm/radeon/radeon_asic.h void cik_sdma_vm_pad_ib(struct radeon_ib *ib);
ib                850 drivers/gpu/drm/radeon/radeon_asic.h int cik_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
ib                949 drivers/gpu/drm/radeon/radeon_asic.h void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
ib                196 drivers/gpu/drm/radeon/radeon_cs.c 		p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm,
ib                261 drivers/gpu/drm/radeon/radeon_cs.c 		r = radeon_sync_resv(p->rdev, &p->ib.sync, resv,
ib                286 drivers/gpu/drm/radeon/radeon_cs.c 	p->ib.sa_bo = NULL;
ib                434 drivers/gpu/drm/radeon/radeon_cs.c 					    &parser->ib.fence->base);
ib                456 drivers/gpu/drm/radeon/radeon_cs.c 	radeon_ib_free(parser->rdev, &parser->ib);
ib                490 drivers/gpu/drm/radeon/radeon_cs.c 	r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);
ib                536 drivers/gpu/drm/radeon/radeon_cs.c 		radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update);
ib                561 drivers/gpu/drm/radeon/radeon_cs.c 	r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib);
ib                584 drivers/gpu/drm/radeon/radeon_cs.c 		r = radeon_ib_schedule(rdev, &parser->ib, &parser->const_ib, true);
ib                586 drivers/gpu/drm/radeon/radeon_cs.c 		r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);
ib                646 drivers/gpu/drm/radeon/radeon_cs.c 	r =  radeon_ib_get(rdev, parser->ring, &parser->ib,
ib                652 drivers/gpu/drm/radeon/radeon_cs.c 	parser->ib.length_dw = ib_chunk->length_dw;
ib                654 drivers/gpu/drm/radeon/radeon_cs.c 		memcpy(parser->ib.ptr, ib_chunk->kdata, ib_chunk->length_dw * 4);
ib                655 drivers/gpu/drm/radeon/radeon_cs.c 	else if (copy_from_user(parser->ib.ptr, ib_chunk->user_ptr, ib_chunk->length_dw * 4))
ib                820 drivers/gpu/drm/radeon/radeon_cs.c 	volatile uint32_t *ib;
ib                824 drivers/gpu/drm/radeon/radeon_cs.c 	ib = p->ib.ptr;
ib                827 drivers/gpu/drm/radeon/radeon_cs.c 		DRM_INFO("ib[%d]=0x%08X\n", idx, ib[idx]);
ib                 59 drivers/gpu/drm/radeon/radeon_ib.c 		  struct radeon_ib *ib, struct radeon_vm *vm,
ib                 64 drivers/gpu/drm/radeon/radeon_ib.c 	r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256);
ib                 70 drivers/gpu/drm/radeon/radeon_ib.c 	radeon_sync_create(&ib->sync);
ib                 72 drivers/gpu/drm/radeon/radeon_ib.c 	ib->ring = ring;
ib                 73 drivers/gpu/drm/radeon/radeon_ib.c 	ib->fence = NULL;
ib                 74 drivers/gpu/drm/radeon/radeon_ib.c 	ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);
ib                 75 drivers/gpu/drm/radeon/radeon_ib.c 	ib->vm = vm;
ib                 80 drivers/gpu/drm/radeon/radeon_ib.c 		ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET;
ib                 82 drivers/gpu/drm/radeon/radeon_ib.c 		ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo);
ib                 84 drivers/gpu/drm/radeon/radeon_ib.c 	ib->is_const_ib = false;
ib                 97 drivers/gpu/drm/radeon/radeon_ib.c void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib)
ib                 99 drivers/gpu/drm/radeon/radeon_ib.c 	radeon_sync_free(rdev, &ib->sync, ib->fence);
ib                100 drivers/gpu/drm/radeon/radeon_ib.c 	radeon_sa_bo_free(rdev, &ib->sa_bo, ib->fence);
ib                101 drivers/gpu/drm/radeon/radeon_ib.c 	radeon_fence_unref(&ib->fence);
ib                125 drivers/gpu/drm/radeon/radeon_ib.c int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib,
ib                128 drivers/gpu/drm/radeon/radeon_ib.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                131 drivers/gpu/drm/radeon/radeon_ib.c 	if (!ib->length_dw || !ring->ready) {
ib                145 drivers/gpu/drm/radeon/radeon_ib.c 	if (ib->vm) {
ib                147 drivers/gpu/drm/radeon/radeon_ib.c 		vm_id_fence = radeon_vm_grab_id(rdev, ib->vm, ib->ring);
ib                148 drivers/gpu/drm/radeon/radeon_ib.c 		radeon_sync_fence(&ib->sync, vm_id_fence);
ib                152 drivers/gpu/drm/radeon/radeon_ib.c 	r = radeon_sync_rings(rdev, &ib->sync, ib->ring);
ib                159 drivers/gpu/drm/radeon/radeon_ib.c 	if (ib->vm)
ib                160 drivers/gpu/drm/radeon/radeon_ib.c 		radeon_vm_flush(rdev, ib->vm, ib->ring,
ib                161 drivers/gpu/drm/radeon/radeon_ib.c 				ib->sync.last_vm_update);
ib                167 drivers/gpu/drm/radeon/radeon_ib.c 	radeon_ring_ib_execute(rdev, ib->ring, ib);
ib                168 drivers/gpu/drm/radeon/radeon_ib.c 	r = radeon_fence_emit(rdev, &ib->fence, ib->ring);
ib                175 drivers/gpu/drm/radeon/radeon_ib.c 		const_ib->fence = radeon_fence_ref(ib->fence);
ib                178 drivers/gpu/drm/radeon/radeon_ib.c 	if (ib->vm)
ib                179 drivers/gpu/drm/radeon/radeon_ib.c 		radeon_vm_fence(rdev, ib->vm, ib->fence);
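
radeon_ib_free() above drops the IB's suballocation and its own fence reference, so callers that want to keep waiting on the submission (as the UVD and VCE helpers below do) take an extra fence reference before freeing. A condensed sketch of that hand-off, with error handling trimmed:

/* Sketch of the fence hand-off pattern visible in the UVD/VCE helpers
 * below; not a driver function. */
static int submit_and_return_fence(struct radeon_device *rdev,
				   struct radeon_ib *ib,
				   struct radeon_fence **fence)
{
	int r = radeon_ib_schedule(rdev, ib, NULL, false);

	if (!r && fence)
		*fence = radeon_fence_ref(ib->fence);	/* keep it alive      */

	radeon_ib_free(rdev, ib);	/* releases the IB's own fence reference */
	return r;
}
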
ib                596 drivers/gpu/drm/radeon/radeon_uvd.c 	p->ib.ptr[data0] = start & 0xFFFFFFFF;
ib                597 drivers/gpu/drm/radeon/radeon_uvd.c 	p->ib.ptr[data1] = start >> 32;
ib                744 drivers/gpu/drm/radeon/radeon_uvd.c 	struct radeon_ib ib;
ib                747 drivers/gpu/drm/radeon/radeon_uvd.c 	r = radeon_ib_get(rdev, ring, &ib, NULL, 64);
ib                751 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.ptr[0] = PACKET0(UVD_GPCOM_VCPU_DATA0, 0);
ib                752 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.ptr[1] = addr;
ib                753 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.ptr[2] = PACKET0(UVD_GPCOM_VCPU_DATA1, 0);
ib                754 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.ptr[3] = addr >> 32;
ib                755 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.ptr[4] = PACKET0(UVD_GPCOM_VCPU_CMD, 0);
ib                756 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.ptr[5] = 0;
ib                758 drivers/gpu/drm/radeon/radeon_uvd.c 		ib.ptr[i] = PACKET0(UVD_NO_OP, 0);
ib                759 drivers/gpu/drm/radeon/radeon_uvd.c 		ib.ptr[i+1] = 0;
ib                761 drivers/gpu/drm/radeon/radeon_uvd.c 	ib.length_dw = 16;
ib                763 drivers/gpu/drm/radeon/radeon_uvd.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                766 drivers/gpu/drm/radeon/radeon_uvd.c 		*fence = radeon_fence_ref(ib.fence);
ib                768 drivers/gpu/drm/radeon/radeon_uvd.c 	radeon_ib_free(rdev, &ib);
ib                350 drivers/gpu/drm/radeon/radeon_vce.c 	struct radeon_ib ib;
ib                354 drivers/gpu/drm/radeon/radeon_vce.c 	r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);
ib                360 drivers/gpu/drm/radeon/radeon_vce.c 	dummy = ib.gpu_addr + 1024;
ib                363 drivers/gpu/drm/radeon/radeon_vce.c 	ib.length_dw = 0;
ib                364 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */
ib                365 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */
ib                366 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(handle);
ib                368 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */
ib                369 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */
ib                370 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000);
ib                371 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000042);
ib                372 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000a);
ib                373 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001);
ib                374 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000080);
ib                375 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000060);
ib                376 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000100);
ib                377 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000100);
ib                378 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c);
ib                379 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000);
ib                381 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000014); /* len */
ib                382 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x05000005); /* feedback buffer */
ib                383 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(upper_32_bits(dummy));
ib                384 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(dummy);
ib                385 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001);
ib                387 drivers/gpu/drm/radeon/radeon_vce.c 	for (i = ib.length_dw; i < ib_size_dw; ++i)
ib                388 drivers/gpu/drm/radeon/radeon_vce.c 		ib.ptr[i] = cpu_to_le32(0x0);
ib                390 drivers/gpu/drm/radeon/radeon_vce.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                396 drivers/gpu/drm/radeon/radeon_vce.c 		*fence = radeon_fence_ref(ib.fence);
ib                398 drivers/gpu/drm/radeon/radeon_vce.c 	radeon_ib_free(rdev, &ib);
ib                417 drivers/gpu/drm/radeon/radeon_vce.c 	struct radeon_ib ib;
ib                421 drivers/gpu/drm/radeon/radeon_vce.c 	r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);
ib                427 drivers/gpu/drm/radeon/radeon_vce.c 	dummy = ib.gpu_addr + 1024;
ib                430 drivers/gpu/drm/radeon/radeon_vce.c 	ib.length_dw = 0;
ib                431 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */
ib                432 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */
ib                433 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(handle);
ib                435 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000014); /* len */
ib                436 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x05000005); /* feedback buffer */
ib                437 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(upper_32_bits(dummy));
ib                438 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(dummy);
ib                439 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001);
ib                441 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000008); /* len */
ib                442 drivers/gpu/drm/radeon/radeon_vce.c 	ib.ptr[ib.length_dw++] = cpu_to_le32(0x02000001); /* destroy cmd */
ib                444 drivers/gpu/drm/radeon/radeon_vce.c 	for (i = ib.length_dw; i < ib_size_dw; ++i)
ib                445 drivers/gpu/drm/radeon/radeon_vce.c 		ib.ptr[i] = cpu_to_le32(0x0);
ib                447 drivers/gpu/drm/radeon/radeon_vce.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                453 drivers/gpu/drm/radeon/radeon_vce.c 		*fence = radeon_fence_ref(ib.fence);
ib                455 drivers/gpu/drm/radeon/radeon_vce.c 	radeon_ib_free(rdev, &ib);
ib                493 drivers/gpu/drm/radeon/radeon_vce.c 	p->ib.ptr[lo] = start & 0xFFFFFFFF;
ib                494 drivers/gpu/drm/radeon/radeon_vce.c 	p->ib.ptr[hi] = start >> 32;
ib                719 drivers/gpu/drm/radeon/radeon_vce.c void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib                721 drivers/gpu/drm/radeon/radeon_vce.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                723 drivers/gpu/drm/radeon/radeon_vce.c 	radeon_ring_write(ring, cpu_to_le32(ib->gpu_addr));
ib                724 drivers/gpu/drm/radeon/radeon_vce.c 	radeon_ring_write(ring, cpu_to_le32(upper_32_bits(ib->gpu_addr)));
ib                725 drivers/gpu/drm/radeon/radeon_vce.c 	radeon_ring_write(ring, cpu_to_le32(ib->length_dw));
ib                360 drivers/gpu/drm/radeon/radeon_vm.c 				struct radeon_ib *ib,
ib                369 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);
ib                372 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_asic_vm_write_pages(rdev, ib, pe, addr,
ib                376 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_asic_vm_set_pages(rdev, ib, pe, addr,
ib                391 drivers/gpu/drm/radeon/radeon_vm.c 	struct radeon_ib ib;
ib                407 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);
ib                411 drivers/gpu/drm/radeon/radeon_vm.c 	ib.length_dw = 0;
ib                413 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);
ib                414 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_asic_vm_pad_ib(rdev, &ib);
ib                415 drivers/gpu/drm/radeon/radeon_vm.c 	WARN_ON(ib.length_dw > 64);
ib                417 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                421 drivers/gpu/drm/radeon/radeon_vm.c 	ib.fence->is_vm_update = true;
ib                422 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_bo_fence(bo, ib.fence, false);
ib                425 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_ib_free(rdev, &ib);
ib                649 drivers/gpu/drm/radeon/radeon_vm.c 	struct radeon_ib ib;
ib                662 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4);
ib                665 drivers/gpu/drm/radeon/radeon_vm.c 	ib.length_dw = 0;
ib                685 drivers/gpu/drm/radeon/radeon_vm.c 				radeon_vm_set_pages(rdev, &ib, last_pde,
ib                699 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_vm_set_pages(rdev, &ib, last_pde, last_pt, count,
ib                702 drivers/gpu/drm/radeon/radeon_vm.c 	if (ib.length_dw != 0) {
ib                703 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_asic_vm_pad_ib(rdev, &ib);
ib                705 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_sync_resv(rdev, &ib.sync, pd->tbo.base.resv, true);
ib                706 drivers/gpu/drm/radeon/radeon_vm.c 		WARN_ON(ib.length_dw > ndw);
ib                707 drivers/gpu/drm/radeon/radeon_vm.c 		r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib                709 drivers/gpu/drm/radeon/radeon_vm.c 			radeon_ib_free(rdev, &ib);
ib                712 drivers/gpu/drm/radeon/radeon_vm.c 		ib.fence->is_vm_update = true;
ib                713 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_bo_fence(pd, ib.fence, false);
ib                715 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_ib_free(rdev, &ib);
ib                733 drivers/gpu/drm/radeon/radeon_vm.c 				struct radeon_ib *ib,
ib                773 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_vm_set_pages(rdev, ib, pe_start, addr, count,
ib                781 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_vm_set_pages(rdev, ib, pe_start, addr, count,
ib                788 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_vm_set_pages(rdev, ib, frag_start, addr, count,
ib                795 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_vm_set_pages(rdev, ib, frag_end, addr, count,
ib                816 drivers/gpu/drm/radeon/radeon_vm.c 				 struct radeon_ib *ib,
ib                833 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_sync_resv(rdev, &ib->sync, pt->tbo.base.resv, true);
ib                849 drivers/gpu/drm/radeon/radeon_vm.c 				radeon_vm_frag_ptes(rdev, ib, last_pte,
ib                866 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_vm_frag_ptes(rdev, ib, last_pte,
ib                917 drivers/gpu/drm/radeon/radeon_vm.c 	struct radeon_ib ib;
ib               1000 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4);
ib               1003 drivers/gpu/drm/radeon/radeon_vm.c 	ib.length_dw = 0;
ib               1009 drivers/gpu/drm/radeon/radeon_vm.c 			radeon_sync_fence(&ib.sync, vm->ids[i].last_id_use);
ib               1012 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start,
ib               1016 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_ib_free(rdev, &ib);
ib               1020 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_asic_vm_pad_ib(rdev, &ib);
ib               1021 drivers/gpu/drm/radeon/radeon_vm.c 	WARN_ON(ib.length_dw > ndw);
ib               1023 drivers/gpu/drm/radeon/radeon_vm.c 	r = radeon_ib_schedule(rdev, &ib, NULL, false);
ib               1025 drivers/gpu/drm/radeon/radeon_vm.c 		radeon_ib_free(rdev, &ib);
ib               1028 drivers/gpu/drm/radeon/radeon_vm.c 	ib.fence->is_vm_update = true;
ib               1029 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence);
ib               1031 drivers/gpu/drm/radeon/radeon_vm.c 	bo_va->last_pt_update = radeon_fence_ref(ib.fence);
ib               1032 drivers/gpu/drm/radeon/radeon_vm.c 	radeon_ib_free(rdev, &ib);
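
The radeon_vm.c lines above build page-table updates as DMA IBs: fill the IB with PTE/PDE write packets, pad it to the DMA alignment, schedule it, mark the fence as a VM update, and fence the page-table BO so later submissions wait for it. A condensed sketch of the clear-BO variant, mirroring the calls listed above with error handling trimmed (radeon_vm_set_pages is the file-local helper shown above; addr and entries are illustrative parameters):

/* Condensed sketch of the radeon_vm_clear_bo flow listed above. */
static int vm_clear_sketch(struct radeon_device *rdev, struct radeon_bo *bo,
			   uint64_t addr, unsigned entries)
{
	struct radeon_ib ib;
	int r;

	r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);
	if (r)
		return r;
	ib.length_dw = 0;

	radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0); /* zero PTEs  */
	radeon_asic_vm_pad_ib(rdev, &ib);	/* DMA IBs need 8-dword multiples */
	WARN_ON(ib.length_dw > 64);

	r = radeon_ib_schedule(rdev, &ib, NULL, false);
	if (!r) {
		ib.fence->is_vm_update = true;
		radeon_bo_fence(bo, ib.fence, false);	/* order later CS after it */
	}
	radeon_ib_free(rdev, &ib);
	return r;
}
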
ib               3404 drivers/gpu/drm/radeon/si.c void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib               3406 drivers/gpu/drm/radeon/si.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib               3407 drivers/gpu/drm/radeon/si.c 	unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
ib               3410 drivers/gpu/drm/radeon/si.c 	if (ib->is_const_ib) {
ib               3441 drivers/gpu/drm/radeon/si.c 			  (ib->gpu_addr & 0xFFFFFFFC));
ib               3442 drivers/gpu/drm/radeon/si.c 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
ib               3443 drivers/gpu/drm/radeon/si.c 	radeon_ring_write(ring, ib->length_dw | (vm_id << 24));
ib               3445 drivers/gpu/drm/radeon/si.c 	if (!ib->is_const_ib) {
ib               4458 drivers/gpu/drm/radeon/si.c 				  u32 *ib, struct radeon_cs_packet *pkt)
ib               4479 drivers/gpu/drm/radeon/si.c static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx)
ib               4482 drivers/gpu/drm/radeon/si.c 	u32 command = ib[idx + 4];
ib               4483 drivers/gpu/drm/radeon/si.c 	u32 info = ib[idx + 1];
ib               4484 drivers/gpu/drm/radeon/si.c 	u32 idx_value = ib[idx];
ib               4509 drivers/gpu/drm/radeon/si.c 			start_reg = ib[idx + 2];
ib               4531 drivers/gpu/drm/radeon/si.c 				   u32 *ib, struct radeon_cs_packet *pkt)
ib               4535 drivers/gpu/drm/radeon/si.c 	u32 idx_value = ib[idx];
ib               4587 drivers/gpu/drm/radeon/si.c 			reg = ib[idx + 3] * 4;
ib               4594 drivers/gpu/drm/radeon/si.c 			start_reg = ib[idx + 1] * 4;
ib               4609 drivers/gpu/drm/radeon/si.c 			reg = ib[idx + 5] * 4;
ib               4616 drivers/gpu/drm/radeon/si.c 			reg = ib[idx + 3] * 4;
ib               4637 drivers/gpu/drm/radeon/si.c 		r = si_vm_packet3_cp_dma_check(ib, idx);
ib               4649 drivers/gpu/drm/radeon/si.c 				       u32 *ib, struct radeon_cs_packet *pkt)
ib               4653 drivers/gpu/drm/radeon/si.c 	u32 idx_value = ib[idx];
ib               4690 drivers/gpu/drm/radeon/si.c 			reg = ib[idx + 3] * 4;
ib               4697 drivers/gpu/drm/radeon/si.c 			start_reg = ib[idx + 1] * 4;
ib               4712 drivers/gpu/drm/radeon/si.c 			reg = ib[idx + 5] * 4;
ib               4719 drivers/gpu/drm/radeon/si.c 			reg = ib[idx + 3] * 4;
ib               4725 drivers/gpu/drm/radeon/si.c 		r = si_vm_packet3_cp_dma_check(ib, idx);
ib               4736 drivers/gpu/drm/radeon/si.c int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
ib               4744 drivers/gpu/drm/radeon/si.c 		pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
ib               4745 drivers/gpu/drm/radeon/si.c 		pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
ib               4756 drivers/gpu/drm/radeon/si.c 			pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
ib               4757 drivers/gpu/drm/radeon/si.c 			if (ib->is_const_ib)
ib               4758 drivers/gpu/drm/radeon/si.c 				ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
ib               4760 drivers/gpu/drm/radeon/si.c 				switch (ib->ring) {
ib               4762 drivers/gpu/drm/radeon/si.c 					ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
ib               4766 drivers/gpu/drm/radeon/si.c 					ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
ib               4769 drivers/gpu/drm/radeon/si.c 					dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring);
ib               4782 drivers/gpu/drm/radeon/si.c 			for (i = 0; i < ib->length_dw; i++) {
ib               4784 drivers/gpu/drm/radeon/si.c 					printk("\t0x%08x <---\n", ib->ptr[i]);
ib               4786 drivers/gpu/drm/radeon/si.c 					printk("\t0x%08x\n", ib->ptr[i]);
ib               4790 drivers/gpu/drm/radeon/si.c 	} while (idx < ib->length_dw);
ib                 70 drivers/gpu/drm/radeon/si_dma.c 			  struct radeon_ib *ib,
ib                 79 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
ib                 81 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(pe);
ib                 82 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = lower_32_bits(src);
ib                 83 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                 84 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
ib                106 drivers/gpu/drm/radeon/si_dma.c 			   struct radeon_ib *ib,
ib                120 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);
ib                121 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = pe;
ib                122 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                133 drivers/gpu/drm/radeon/si_dma.c 			ib->ptr[ib->length_dw++] = value;
ib                134 drivers/gpu/drm/radeon/si_dma.c 			ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                153 drivers/gpu/drm/radeon/si_dma.c 			 struct radeon_ib *ib,
ib                172 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
ib                173 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = pe; /* dst addr */
ib                174 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
ib                175 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = flags; /* mask */
ib                176 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = 0;
ib                177 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = value; /* value */
ib                178 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = upper_32_bits(value);
ib                179 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = incr; /* increment size */
ib                180 drivers/gpu/drm/radeon/si_dma.c 		ib->ptr[ib->length_dw++] = 0;
ib                482 drivers/gpu/drm/radeon/uvd_v1_0.c void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
ib                484 drivers/gpu/drm/radeon/uvd_v1_0.c 	struct radeon_ring *ring = &rdev->ring[ib->ring];
ib                487 drivers/gpu/drm/radeon/uvd_v1_0.c 	radeon_ring_write(ring, ib->gpu_addr);
ib                489 drivers/gpu/drm/radeon/uvd_v1_0.c 	radeon_ring_write(ring, ib->length_dw);
ib                258 drivers/gpu/drm/vc4/vc4_validate.c 	struct drm_gem_cma_object *ib;
ib                275 drivers/gpu/drm/vc4/vc4_validate.c 	ib = vc4_use_handle(exec, 0);
ib                276 drivers/gpu/drm/vc4/vc4_validate.c 	if (!ib)
ib                280 drivers/gpu/drm/vc4/vc4_validate.c 				  to_vc4_bo(&ib->base)->write_seqno);
ib                282 drivers/gpu/drm/vc4/vc4_validate.c 	if (offset > ib->base.size ||
ib                283 drivers/gpu/drm/vc4/vc4_validate.c 	    (ib->base.size - offset) / index_size < length) {
ib                285 drivers/gpu/drm/vc4/vc4_validate.c 			  offset, length, index_size, ib->base.size);
ib                289 drivers/gpu/drm/vc4/vc4_validate.c 	*(uint32_t *)(validated + 5) = ib->paddr + offset;
ib                364 drivers/infiniband/core/cma.c 		struct ib_sa_multicast *ib;
ib                523 drivers/infiniband/core/cma.c 	kfree(mc->multicast.ib);
ib               1117 drivers/infiniband/core/cma.c 		if (!id_priv->cm_id.ib || (id_priv->id.qp_type == IB_QPT_UD))
ib               1120 drivers/infiniband/core/cma.c 			ret = ib_cm_init_qp_attr(id_priv->cm_id.ib, qp_attr,
ib               1239 drivers/infiniband/core/cma.c 	struct sockaddr_ib *listen_ib, *ib;
ib               1243 drivers/infiniband/core/cma.c 		ib = (struct sockaddr_ib *)src_addr;
ib               1244 drivers/infiniband/core/cma.c 		ib->sib_family = AF_IB;
ib               1246 drivers/infiniband/core/cma.c 			ib->sib_pkey = path->pkey;
ib               1247 drivers/infiniband/core/cma.c 			ib->sib_flowinfo = path->flow_label;
ib               1248 drivers/infiniband/core/cma.c 			memcpy(&ib->sib_addr, &path->sgid, 16);
ib               1249 drivers/infiniband/core/cma.c 			ib->sib_sid = path->service_id;
ib               1250 drivers/infiniband/core/cma.c 			ib->sib_scope_id = 0;
ib               1252 drivers/infiniband/core/cma.c 			ib->sib_pkey = listen_ib->sib_pkey;
ib               1253 drivers/infiniband/core/cma.c 			ib->sib_flowinfo = listen_ib->sib_flowinfo;
ib               1254 drivers/infiniband/core/cma.c 			ib->sib_addr = listen_ib->sib_addr;
ib               1255 drivers/infiniband/core/cma.c 			ib->sib_sid = listen_ib->sib_sid;
ib               1256 drivers/infiniband/core/cma.c 			ib->sib_scope_id = listen_ib->sib_scope_id;
ib               1258 drivers/infiniband/core/cma.c 		ib->sib_sid_mask = cpu_to_be64(0xffffffffffffffffULL);
ib               1261 drivers/infiniband/core/cma.c 		ib = (struct sockaddr_ib *)dst_addr;
ib               1262 drivers/infiniband/core/cma.c 		ib->sib_family = AF_IB;
ib               1264 drivers/infiniband/core/cma.c 			ib->sib_pkey = path->pkey;
ib               1265 drivers/infiniband/core/cma.c 			ib->sib_flowinfo = path->flow_label;
ib               1266 drivers/infiniband/core/cma.c 			memcpy(&ib->sib_addr, &path->dgid, 16);
ib               1811 drivers/infiniband/core/cma.c 		cma_igmp_send(ndev, &mc->multicast.ib->rec.mgid, false);
ib               1827 drivers/infiniband/core/cma.c 			ib_sa_free_multicast(mc->multicast.ib);
ib               1854 drivers/infiniband/core/cma.c 			if (id_priv->cm_id.ib)
ib               1855 drivers/infiniband/core/cma.c 				ib_destroy_cm_id(id_priv->cm_id.ib);
ib               1893 drivers/infiniband/core/cma.c 	ret = ib_send_cm_rtu(id_priv->cm_id.ib, NULL, 0);
ib               1901 drivers/infiniband/core/cma.c 	ib_send_cm_rej(id_priv->cm_id.ib, IB_CM_REJ_CONSUMER_DEFINED,
ib               1991 drivers/infiniband/core/cma.c 		id_priv->cm_id.ib = NULL;
ib               2182 drivers/infiniband/core/cma.c 	conn_id->cm_id.ib = cm_id;
ib               2213 drivers/infiniband/core/cma.c 	conn_id->cm_id.ib = NULL;
ib               2425 drivers/infiniband/core/cma.c 	id_priv->cm_id.ib = id;
ib               3727 drivers/infiniband/core/cma.c 		id_priv->cm_id.ib = NULL;
ib               3778 drivers/infiniband/core/cma.c 	id_priv->cm_id.ib = id;
ib               3786 drivers/infiniband/core/cma.c 	ret = ib_send_cm_sidr_req(id_priv->cm_id.ib, &req);
ib               3788 drivers/infiniband/core/cma.c 		ib_destroy_cm_id(id_priv->cm_id.ib);
ib               3789 drivers/infiniband/core/cma.c 		id_priv->cm_id.ib = NULL;
ib               3829 drivers/infiniband/core/cma.c 	id_priv->cm_id.ib = id;
ib               3859 drivers/infiniband/core/cma.c 	ret = ib_send_cm_req(id_priv->cm_id.ib, &req);
ib               3863 drivers/infiniband/core/cma.c 		id_priv->cm_id.ib = NULL;
ib               3972 drivers/infiniband/core/cma.c 	ret = ib_send_cm_rep(id_priv->cm_id.ib, &rep);
ib               4021 drivers/infiniband/core/cma.c 	return ib_send_cm_sidr_rep(id_priv->cm_id.ib, &rep);
ib               4080 drivers/infiniband/core/cma.c 	if (!id_priv->cm_id.ib)
ib               4085 drivers/infiniband/core/cma.c 		ret = ib_cm_notify(id_priv->cm_id.ib, event);
ib               4102 drivers/infiniband/core/cma.c 	if (!id_priv->cm_id.ib)
ib               4110 drivers/infiniband/core/cma.c 			ret = ib_send_cm_rej(id_priv->cm_id.ib,
ib               4129 drivers/infiniband/core/cma.c 	if (!id_priv->cm_id.ib)
ib               4137 drivers/infiniband/core/cma.c 		if (ib_send_cm_dreq(id_priv->cm_id.ib, NULL, 0))
ib               4138 drivers/infiniband/core/cma.c 			ib_send_cm_drep(id_priv->cm_id.ib, NULL, 0);
ib               4297 drivers/infiniband/core/cma.c 	mc->multicast.ib = ib_sa_join_multicast(&sa_client, id_priv->id.device,
ib               4301 drivers/infiniband/core/cma.c 	return PTR_ERR_OR_ZERO(mc->multicast.ib);
ib               4308 drivers/infiniband/core/cma.c 	struct ib_sa_multicast *m = mc->multicast.ib;
ib               4310 drivers/infiniband/core/cma.c 	mc->multicast.ib->context = mc;
ib               4365 drivers/infiniband/core/cma.c 	mc->multicast.ib = kzalloc(sizeof(struct ib_sa_multicast), GFP_KERNEL);
ib               4366 drivers/infiniband/core/cma.c 	if (!mc->multicast.ib) {
ib               4373 drivers/infiniband/core/cma.c 	cma_iboe_set_mgid(addr, &mc->multicast.ib->rec.mgid, gid_type);
ib               4375 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.pkey = cpu_to_be16(0xffff);
ib               4377 drivers/infiniband/core/cma.c 		mc->multicast.ib->rec.qkey = cpu_to_be32(RDMA_UDP_QKEY);
ib               4385 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.rate = iboe_get_rate(ndev);
ib               4386 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.hop_limit = 1;
ib               4387 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.mtu = iboe_get_mtu(ndev->mtu);
ib               4391 drivers/infiniband/core/cma.c 			mc->multicast.ib->rec.hop_limit = IPV6_DEFAULT_HOPLIMIT;
ib               4393 drivers/infiniband/core/cma.c 				err = cma_igmp_send(ndev, &mc->multicast.ib->rec.mgid,
ib               4402 drivers/infiniband/core/cma.c 	if (err || !mc->multicast.ib->rec.mtu) {
ib               4408 drivers/infiniband/core/cma.c 		    &mc->multicast.ib->rec.port_gid);
ib               4418 drivers/infiniband/core/cma.c 	kfree(mc->multicast.ib);
ib               4487 drivers/infiniband/core/cma.c 						&mc->multicast.ib->rec.mgid,
ib               4488 drivers/infiniband/core/cma.c 						be16_to_cpu(mc->multicast.ib->rec.mlid));
ib               4493 drivers/infiniband/core/cma.c 				ib_sa_free_multicast(mc->multicast.ib);
ib                 77 drivers/infiniband/core/cma_priv.h 		struct ib_cm_id	*ib;
ib                 79 drivers/infiniband/core/sa_query.c 		struct ib_class_port_info ib;
ib                209 drivers/infiniband/core/sa_query.c 	{ PATH_REC_FIELD(ib.dlid),
ib                213 drivers/infiniband/core/sa_query.c 	{ PATH_REC_FIELD(ib.slid),
ib                217 drivers/infiniband/core/sa_query.c 	{ PATH_REC_FIELD(ib.raw_traffic),
ib               1977 drivers/infiniband/core/sa_query.c 		ret = ib_get_cpi_capmask2(&port->classport_info.data.ib)
ib               2035 drivers/infiniband/core/sa_query.c 				memcpy(&info->data.ib, &rec,
ib               2036 drivers/infiniband/core/sa_query.c 				       sizeof(info->data.ib));
ib                953 drivers/infiniband/core/ucma.c 			struct sa_path_rec ib;
ib                955 drivers/infiniband/core/ucma.c 			sa_convert_path_opa_to_ib(&ib, rec);
ib                956 drivers/infiniband/core/ucma.c 			ib_sa_pack_path(&ib, &resp->path_data[i].path_rec);
ib                 38 drivers/infiniband/core/uverbs_marshall.c 				  struct rdma_ah_attr *ib,
ib                 45 drivers/infiniband/core/uverbs_marshall.c 	*ib = *opa;
ib                 47 drivers/infiniband/core/uverbs_marshall.c 	ib->type = RDMA_AH_ATTR_TYPE_IB;
ib                 48 drivers/infiniband/core/uverbs_marshall.c 	rdma_ah_set_grh(ib, NULL, 0, 0, 1, 0);
ib                 52 drivers/infiniband/core/uverbs_marshall.c 		rdma_ah_set_subnet_prefix(ib, OPA_DEFAULT_GID_PREFIX);
ib                 55 drivers/infiniband/core/uverbs_marshall.c 		rdma_ah_set_subnet_prefix(ib,
ib                 58 drivers/infiniband/core/uverbs_marshall.c 	rdma_ah_set_interface_id(ib, OPA_MAKE_ID(rdma_ah_get_dlid(opa)));
ib                 48 drivers/infiniband/hw/mlx4/ah.c 	ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |
ib                 50 drivers/infiniband/hw/mlx4/ah.c 	ah->av.ib.g_slid  = rdma_ah_get_path_bits(ah_attr);
ib                 51 drivers/infiniband/hw/mlx4/ah.c 	ah->av.ib.sl_tclass_flowlabel =
ib                 56 drivers/infiniband/hw/mlx4/ah.c 		ah->av.ib.g_slid   |= 0x80;
ib                 57 drivers/infiniband/hw/mlx4/ah.c 		ah->av.ib.gid_index = grh->sgid_index;
ib                 58 drivers/infiniband/hw/mlx4/ah.c 		ah->av.ib.hop_limit = grh->hop_limit;
ib                 59 drivers/infiniband/hw/mlx4/ah.c 		ah->av.ib.sl_tclass_flowlabel |=
ib                 62 drivers/infiniband/hw/mlx4/ah.c 		memcpy(ah->av.ib.dgid, grh->dgid.raw, 16);
ib                 65 drivers/infiniband/hw/mlx4/ah.c 	ah->av.ib.dlid = cpu_to_be16(rdma_ah_get_dlid(ah_attr));
ib                 73 drivers/infiniband/hw/mlx4/ah.c 		ah->av.ib.stat_rate = static_rate;
ib                136 drivers/infiniband/hw/mlx4/ah.c 		ah->av.ib.dlid = cpu_to_be16(0xc000);
ib                182 drivers/infiniband/hw/mlx4/ah.c 	mah->av.ib.port_pd &= cpu_to_be32(0x7FFFFFFF);
ib                197 drivers/infiniband/hw/mlx4/ah.c 	int port_num = be32_to_cpu(ah->av.ib.port_pd) >> 24;
ib                208 drivers/infiniband/hw/mlx4/ah.c 		rdma_ah_set_dlid(ah_attr, be16_to_cpu(ah->av.ib.dlid));
ib                210 drivers/infiniband/hw/mlx4/ah.c 			       be32_to_cpu(ah->av.ib.sl_tclass_flowlabel)
ib                215 drivers/infiniband/hw/mlx4/ah.c 	if (ah->av.ib.stat_rate)
ib                217 drivers/infiniband/hw/mlx4/ah.c 					ah->av.ib.stat_rate -
ib                219 drivers/infiniband/hw/mlx4/ah.c 	rdma_ah_set_path_bits(ah_attr, ah->av.ib.g_slid & 0x7F);
ib                221 drivers/infiniband/hw/mlx4/ah.c 		u32 tc_fl = be32_to_cpu(ah->av.ib.sl_tclass_flowlabel);
ib                224 drivers/infiniband/hw/mlx4/ah.c 				tc_fl & 0xfffff, ah->av.ib.gid_index,
ib                225 drivers/infiniband/hw/mlx4/ah.c 				ah->av.ib.hop_limit,
ib                227 drivers/infiniband/hw/mlx4/ah.c 		rdma_ah_set_dgid_raw(ah_attr, ah->av.ib.dgid);
ib               1572 drivers/infiniband/hw/mlx4/mad.c 	port = be32_to_cpu(ah.av.ib.port_pd) >> 24;
ib               1576 drivers/infiniband/hw/mlx4/mad.c 	ah.av.ib.port_pd = cpu_to_be32(port << 24 | (be32_to_cpu(ah.av.ib.port_pd) & 0xffffff));
ib               1370 drivers/infiniband/hw/mlx4/main.c 		if (FIELDS_NOT_SUPPORTED(ib_spec->ib.mask, LAST_IB_FIELD))
ib               1374 drivers/infiniband/hw/mlx4/main.c 		mlx4_spec->ib.l3_qpn =
ib               1376 drivers/infiniband/hw/mlx4/main.c 		mlx4_spec->ib.qpn_mask =
ib                813 drivers/infiniband/hw/mlx4/mlx4_ib.h 	u8 port = be32_to_cpu(ah->av.ib.port_pd) >> 24 & 3;
ib                818 drivers/infiniband/hw/mlx4/mlx4_ib.h 	return !!(ah->av.ib.g_slid & 0x80);
ib               2914 drivers/infiniband/hw/mlx4/qp.c 			be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 28;
ib               2916 drivers/infiniband/hw/mlx4/qp.c 			cpu_to_be16(ah->av.ib.g_slid & 0x7f);
ib               2918 drivers/infiniband/hw/mlx4/qp.c 			cpu_to_be16(ah->av.ib.g_slid & 0x7f);
ib               3068 drivers/infiniband/hw/mlx4/qp.c 							   be32_to_cpu(ah->av.ib.port_pd) >> 24,
ib               3069 drivers/infiniband/hw/mlx4/qp.c 							   ah->av.ib.gid_index, &sgid.raw[0]);
ib               3074 drivers/infiniband/hw/mlx4/qp.c 					    ah->av.ib.gid_index,
ib               3101 drivers/infiniband/hw/mlx4/qp.c 			be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 28;
ib               3102 drivers/infiniband/hw/mlx4/qp.c 		sqp->ud_header.lrh.destination_lid = ah->av.ib.dlid;
ib               3103 drivers/infiniband/hw/mlx4/qp.c 		sqp->ud_header.lrh.source_lid = cpu_to_be16(ah->av.ib.g_slid & 0x7f);
ib               3108 drivers/infiniband/hw/mlx4/qp.c 			(be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 20) & 0xff;
ib               3110 drivers/infiniband/hw/mlx4/qp.c 			ah->av.ib.sl_tclass_flowlabel & cpu_to_be32(0xfffff);
ib               3111 drivers/infiniband/hw/mlx4/qp.c 		sqp->ud_header.grh.hop_limit     = ah->av.ib.hop_limit;
ib               3126 drivers/infiniband/hw/mlx4/qp.c 						       guid_cache[ah->av.ib.gid_index];
ib               3133 drivers/infiniband/hw/mlx4/qp.c 		       ah->av.ib.dgid, 16);
ib               3138 drivers/infiniband/hw/mlx4/qp.c 			(be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 20) & 0xff;
ib               3145 drivers/infiniband/hw/mlx4/qp.c 		memcpy(&sqp->ud_header.ip4.daddr, ah->av.ib.dgid + 12, 4);
ib               3162 drivers/infiniband/hw/mlx4/qp.c 		if (ah->av.ib.port_pd & cpu_to_be32(0x80000000))
ib               3184 drivers/infiniband/hw/mlx4/qp.c 		u16 pcp = (be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 29) << 13;
ib               3390 drivers/infiniband/hw/mlx4/qp.c 	int port = *((u8 *) &av->ib.port_pd) & 0x3;
ib               3393 drivers/infiniband/hw/mlx4/qp.c 	sqp_av.port_pd = av->ib.port_pd | cpu_to_be32(0x80000000);
ib               3394 drivers/infiniband/hw/mlx4/qp.c 	sqp_av.g_slid = av->ib.g_slid & 0x7f; /* no GRH */
ib               3395 drivers/infiniband/hw/mlx4/qp.c 	sqp_av.sl_tclass_flowlabel = av->ib.sl_tclass_flowlabel &
ib               3562 drivers/infiniband/hw/mlx4/qp.c 					   ah->av.ib.gid_index,
ib               3568 drivers/infiniband/hw/mlx4/qp.c 				       ah->av.ib.gid_index);
ib                 29 drivers/infiniband/sw/siw/siw_cq.c 	enum ib_wc_status ib;
ib                 59 drivers/infiniband/sw/siw/siw_cq.c 		wc->status = map_cqe_status[cqe->status].ib;
ib               1155 drivers/media/common/v4l2-tpg/v4l2-tpg-colors.c 	double ir, ig, ib;
ib               1159 drivers/media/common/v4l2-tpg/v4l2-tpg-colors.c 	ib = m[2][0] * (*r) + m[2][1] * (*g) + m[2][2] * (*b);
ib               1162 drivers/media/common/v4l2-tpg/v4l2-tpg-colors.c 	*b = ib;
ib                207 drivers/media/dvb-frontends/bcm3510.c 	u8 ob[MAX_XFER_SIZE], ib[MAX_XFER_SIZE];
ib                210 drivers/media/dvb-frontends/bcm3510.c 	if (ilen + 2 > sizeof(ib)) {
ib                232 drivers/media/dvb-frontends/bcm3510.c 		(ret = bcm3510_hab_get_response(st, ib, ilen+2)) < 0)
ib                236 drivers/media/dvb-frontends/bcm3510.c 	dbufout(ib,ilen+2,deb_hab);
ib                239 drivers/media/dvb-frontends/bcm3510.c 	memcpy(ibuf,&ib[2],ilen);
ib                100 drivers/net/ethernet/amd/7990.c 		       t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
ib                101 drivers/net/ethernet/amd/7990.c 		       ib->brx_ring[t].length, \
ib                102 drivers/net/ethernet/amd/7990.c 		       ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
ib                106 drivers/net/ethernet/amd/7990.c 		       t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
ib                107 drivers/net/ethernet/amd/7990.c 		       ib->btx_ring[t].length, \
ib                108 drivers/net/ethernet/amd/7990.c 		       ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
ib                140 drivers/net/ethernet/amd/7990.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                150 drivers/net/ethernet/amd/7990.c 	ib->mode = LE_MO_PROM;                             /* normal, enable Tx & Rx */
ib                163 drivers/net/ethernet/amd/7990.c 	ib->phys_addr[0] = dev->dev_addr[1];
ib                164 drivers/net/ethernet/amd/7990.c 	ib->phys_addr[1] = dev->dev_addr[0];
ib                165 drivers/net/ethernet/amd/7990.c 	ib->phys_addr[2] = dev->dev_addr[3];
ib                166 drivers/net/ethernet/amd/7990.c 	ib->phys_addr[3] = dev->dev_addr[2];
ib                167 drivers/net/ethernet/amd/7990.c 	ib->phys_addr[4] = dev->dev_addr[5];
ib                168 drivers/net/ethernet/amd/7990.c 	ib->phys_addr[5] = dev->dev_addr[4];
ib                171 drivers/net/ethernet/amd/7990.c 	       ib->phys_addr[i] = dev->dev_addr[i];
ib                181 drivers/net/ethernet/amd/7990.c 		ib->btx_ring[i].tmd0      = leptr;
ib                182 drivers/net/ethernet/amd/7990.c 		ib->btx_ring[i].tmd1_hadr = leptr >> 16;
ib                183 drivers/net/ethernet/amd/7990.c 		ib->btx_ring[i].tmd1_bits = 0;
ib                184 drivers/net/ethernet/amd/7990.c 		ib->btx_ring[i].length    = 0xf000; /* The ones required by tmd2 */
ib                185 drivers/net/ethernet/amd/7990.c 		ib->btx_ring[i].misc      = 0;
ib                196 drivers/net/ethernet/amd/7990.c 		ib->brx_ring[i].rmd0      = leptr;
ib                197 drivers/net/ethernet/amd/7990.c 		ib->brx_ring[i].rmd1_hadr = leptr >> 16;
ib                198 drivers/net/ethernet/amd/7990.c 		ib->brx_ring[i].rmd1_bits = LE_R1_OWN;
ib                200 drivers/net/ethernet/amd/7990.c 		ib->brx_ring[i].length    = -RX_BUFF_SIZE | 0xf000;
ib                201 drivers/net/ethernet/amd/7990.c 		ib->brx_ring[i].mblength  = 0;
ib                210 drivers/net/ethernet/amd/7990.c 	ib->rx_len = (lp->lance_log_rx_bufs << 13) | (leptr >> 16);
ib                211 drivers/net/ethernet/amd/7990.c 	ib->rx_ptr = leptr;
ib                217 drivers/net/ethernet/amd/7990.c 	ib->tx_len = (lp->lance_log_tx_bufs << 13) | (leptr >> 16);
ib                218 drivers/net/ethernet/amd/7990.c 	ib->tx_ptr = leptr;
ib                223 drivers/net/ethernet/amd/7990.c 	ib->filter[0] = 0;
ib                224 drivers/net/ethernet/amd/7990.c 	ib->filter[1] = 0;
ib                275 drivers/net/ethernet/amd/7990.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                287 drivers/net/ethernet/amd/7990.c 			       ib->brx_ring[i].rmd1_bits & LE_R1_OWN ? "_" : "X");
ib                290 drivers/net/ethernet/amd/7990.c 			      ib->brx_ring[i].rmd1_bits & LE_R1_OWN ? "." : "1");
ib                298 drivers/net/ethernet/amd/7990.c 	for (rd = &ib->brx_ring[lp->rx_new];     /* For each Rx ring we own... */
ib                300 drivers/net/ethernet/amd/7990.c 	     rd = &ib->brx_ring[lp->rx_new]) {
ib                336 drivers/net/ethernet/amd/7990.c 					 (unsigned char *)&(ib->rx_buf[lp->rx_new][0]),
ib                355 drivers/net/ethernet/amd/7990.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                369 drivers/net/ethernet/amd/7990.c 		td = &ib->btx_ring[i];
ib                542 drivers/net/ethernet/amd/7990.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                570 drivers/net/ethernet/amd/7990.c 	ib->btx_ring[entry].length = (-len) | 0xf000;
ib                571 drivers/net/ethernet/amd/7990.c 	ib->btx_ring[entry].misc = 0;
ib                574 drivers/net/ethernet/amd/7990.c 		memset((void *)&ib->tx_buf[entry][0], 0, ETH_ZLEN);
ib                575 drivers/net/ethernet/amd/7990.c 	skb_copy_from_linear_data(skb, (void *)&ib->tx_buf[entry][0], skblen);
ib                578 drivers/net/ethernet/amd/7990.c 	ib->btx_ring[entry].tmd1_bits = (LE_T1_POK|LE_T1_OWN);
ib                601 drivers/net/ethernet/amd/7990.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                602 drivers/net/ethernet/amd/7990.c 	volatile u16 *mcast_table = (u16 *)&ib->filter;
ib                608 drivers/net/ethernet/amd/7990.c 		ib->filter[0] = 0xffffffff;
ib                609 drivers/net/ethernet/amd/7990.c 		ib->filter[1] = 0xffffffff;
ib                613 drivers/net/ethernet/amd/7990.c 	ib->filter[0] = 0;
ib                614 drivers/net/ethernet/amd/7990.c 	ib->filter[1] = 0;
ib                628 drivers/net/ethernet/amd/7990.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                643 drivers/net/ethernet/amd/7990.c 		ib->mode |= LE_MO_PROM;
ib                645 drivers/net/ethernet/amd/7990.c 		ib->mode &= ~LE_MO_PROM;
ib                153 drivers/net/ethernet/amd/a2065.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                164 drivers/net/ethernet/amd/a2065.c 	ib->mode = 0;
ib                169 drivers/net/ethernet/amd/a2065.c 	ib->phys_addr[0] = dev->dev_addr[1];
ib                170 drivers/net/ethernet/amd/a2065.c 	ib->phys_addr[1] = dev->dev_addr[0];
ib                171 drivers/net/ethernet/amd/a2065.c 	ib->phys_addr[2] = dev->dev_addr[3];
ib                172 drivers/net/ethernet/amd/a2065.c 	ib->phys_addr[3] = dev->dev_addr[2];
ib                173 drivers/net/ethernet/amd/a2065.c 	ib->phys_addr[4] = dev->dev_addr[5];
ib                174 drivers/net/ethernet/amd/a2065.c 	ib->phys_addr[5] = dev->dev_addr[4];
ib                180 drivers/net/ethernet/amd/a2065.c 		ib->btx_ring[i].tmd0      = leptr;
ib                181 drivers/net/ethernet/amd/a2065.c 		ib->btx_ring[i].tmd1_hadr = leptr >> 16;
ib                182 drivers/net/ethernet/amd/a2065.c 		ib->btx_ring[i].tmd1_bits = 0;
ib                183 drivers/net/ethernet/amd/a2065.c 		ib->btx_ring[i].length    = 0xf000; /* The ones required by tmd2 */
ib                184 drivers/net/ethernet/amd/a2065.c 		ib->btx_ring[i].misc      = 0;
ib                194 drivers/net/ethernet/amd/a2065.c 		ib->brx_ring[i].rmd0      = leptr;
ib                195 drivers/net/ethernet/amd/a2065.c 		ib->brx_ring[i].rmd1_hadr = leptr >> 16;
ib                196 drivers/net/ethernet/amd/a2065.c 		ib->brx_ring[i].rmd1_bits = LE_R1_OWN;
ib                197 drivers/net/ethernet/amd/a2065.c 		ib->brx_ring[i].length    = -RX_BUFF_SIZE | 0xf000;
ib                198 drivers/net/ethernet/amd/a2065.c 		ib->brx_ring[i].mblength  = 0;
ib                207 drivers/net/ethernet/amd/a2065.c 	ib->rx_len = (lp->lance_log_rx_bufs << 13) | (leptr >> 16);
ib                208 drivers/net/ethernet/amd/a2065.c 	ib->rx_ptr = leptr;
ib                213 drivers/net/ethernet/amd/a2065.c 	ib->tx_len = (lp->lance_log_tx_bufs << 13) | (leptr >> 16);
ib                214 drivers/net/ethernet/amd/a2065.c 	ib->tx_ptr = leptr;
ib                218 drivers/net/ethernet/amd/a2065.c 	ib->filter[0] = 0;
ib                219 drivers/net/ethernet/amd/a2065.c 	ib->filter[1] = 0;
ib                248 drivers/net/ethernet/amd/a2065.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                258 drivers/net/ethernet/amd/a2065.c 		char r1_own = ib->brx_ring[i].rmd1_bits & LE_R1_OWN;
ib                270 drivers/net/ethernet/amd/a2065.c 	for (rd = &ib->brx_ring[lp->rx_new];
ib                272 drivers/net/ethernet/amd/a2065.c 	     rd = &ib->brx_ring[lp->rx_new]) {
ib                308 drivers/net/ethernet/amd/a2065.c 				 (unsigned char *)&ib->rx_buf[lp->rx_new][0],
ib                327 drivers/net/ethernet/amd/a2065.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                339 drivers/net/ethernet/amd/a2065.c 		td = &ib->btx_ring[i];
ib                540 drivers/net/ethernet/amd/a2065.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                560 drivers/net/ethernet/amd/a2065.c 	ib->btx_ring[entry].length = (-skblen) | 0xf000;
ib                561 drivers/net/ethernet/amd/a2065.c 	ib->btx_ring[entry].misc = 0;
ib                563 drivers/net/ethernet/amd/a2065.c 	skb_copy_from_linear_data(skb, (void *)&ib->tx_buf[entry][0], skblen);
ib                566 drivers/net/ethernet/amd/a2065.c 	ib->btx_ring[entry].tmd1_bits = (LE_T1_POK | LE_T1_OWN);
ib                587 drivers/net/ethernet/amd/a2065.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                588 drivers/net/ethernet/amd/a2065.c 	volatile u16 *mcast_table = (u16 *)&ib->filter;
ib                594 drivers/net/ethernet/amd/a2065.c 		ib->filter[0] = 0xffffffff;
ib                595 drivers/net/ethernet/amd/a2065.c 		ib->filter[1] = 0xffffffff;
ib                599 drivers/net/ethernet/amd/a2065.c 	ib->filter[0] = 0;
ib                600 drivers/net/ethernet/amd/a2065.c 	ib->filter[1] = 0;
ib                613 drivers/net/ethernet/amd/a2065.c 	volatile struct lance_init_block *ib = lp->init_block;
ib                632 drivers/net/ethernet/amd/a2065.c 		ib->mode |= LE_MO_PROM;
ib                634 drivers/net/ethernet/amd/a2065.c 		ib->mode &= ~LE_MO_PROM;
ib                235 drivers/net/ethernet/amd/declance.c #define lib_ptr(ib, rt, type) 						\
ib                236 drivers/net/ethernet/amd/declance.c 	((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
ib                453 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                465 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) |
ib                467 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) |
ib                469 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) |
ib                475 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) |
ib                477 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, rx_ptr, lp->type) = leptr;
ib                484 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) |
ib                486 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, tx_ptr, lp->type) = leptr;
ib                497 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, btx_ring[i].tmd0, lp->type) = leptr;
ib                498 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, btx_ring[i].tmd1, lp->type) = (leptr >> 16) &
ib                500 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, btx_ring[i].length, lp->type) = 0xf000;
ib                502 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, btx_ring[i].misc, lp->type) = 0;
ib                513 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, brx_ring[i].rmd0, lp->type) = leptr;
ib                514 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, brx_ring[i].rmd1, lp->type) = ((leptr >> 16) &
ib                517 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, brx_ring[i].length, lp->type) = -RX_BUFF_SIZE |
ib                519 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, brx_ring[i].mblength, lp->type) = 0;
ib                559 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                572 drivers/net/ethernet/amd/declance.c 				printk("%s", *lib_ptr(ib, brx_ring[i].rmd1,
ib                576 drivers/net/ethernet/amd/declance.c 				printk("%s", *lib_ptr(ib, brx_ring[i].rmd1,
ib                584 drivers/net/ethernet/amd/declance.c 	for (rd = lib_ptr(ib, brx_ring[lp->rx_new], lp->type);
ib                586 drivers/net/ethernet/amd/declance.c 	     rd = lib_ptr(ib, brx_ring[lp->rx_new], lp->type)) {
ib                646 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                657 drivers/net/ethernet/amd/declance.c 		td = lib_ptr(ib, btx_ring[i], lp->type);
ib                782 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                797 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, mode, lp->type) = 0;
ib                798 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[0], lp->type) = 0;
ib                799 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[1], lp->type) = 0;
ib                800 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[2], lp->type) = 0;
ib                801 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[3], lp->type) = 0;
ib                902 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                919 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, btx_ring[entry].length, lp->type) = (-len);
ib                920 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, btx_ring[entry].misc, lp->type) = 0;
ib                925 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, btx_ring[entry].tmd1, lp->type) =
ib                946 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                952 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, filter[0], lp->type) = 0xffff;
ib                953 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, filter[1], lp->type) = 0xffff;
ib                954 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, filter[2], lp->type) = 0xffff;
ib                955 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, filter[3], lp->type) = 0xffff;
ib                959 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[0], lp->type) = 0;
ib                960 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[1], lp->type) = 0;
ib                961 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[2], lp->type) = 0;
ib                962 drivers/net/ethernet/amd/declance.c 	*lib_ptr(ib, filter[3], lp->type) = 0;
ib                968 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, filter[crc >> 4], lp->type) |= 1 << (crc & 0xf);
ib                975 drivers/net/ethernet/amd/declance.c 	volatile u16 *ib = (volatile u16 *)dev->mem_start;
ib                995 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, mode, lp->type) |= LE_MO_PROM;
ib                997 drivers/net/ethernet/amd/declance.c 		*lib_ptr(ib, mode, lp->type) &= ~LE_MO_PROM;
ib                226 drivers/net/ethernet/amd/ni65.c 	struct init_block ib;
ib                577 drivers/net/ethernet/amd/ni65.c 		p->ib.eaddr[i] = daddr[i];
ib                580 drivers/net/ethernet/amd/ni65.c 		p->ib.filter[i] = filter;
ib                581 drivers/net/ethernet/amd/ni65.c 	p->ib.mode = mode;
ib                583 drivers/net/ethernet/amd/ni65.c 	p->ib.trp = (u32) isa_virt_to_bus(p->tmdhead) | TMDNUMMASK;
ib                584 drivers/net/ethernet/amd/ni65.c 	p->ib.rrp = (u32) isa_virt_to_bus(p->rmdhead) | RMDNUMMASK;
ib                586 drivers/net/ethernet/amd/ni65.c 	pib = (u32) isa_virt_to_bus(&p->ib);
ib               2686 drivers/net/ethernet/amd/pcnet32.c 	volatile struct pcnet32_init_block *ib = lp->init_block;
ib               2687 drivers/net/ethernet/amd/pcnet32.c 	volatile __le16 *mcast_table = (__le16 *)ib->filter;
ib               2695 drivers/net/ethernet/amd/pcnet32.c 		ib->filter[0] = cpu_to_le32(~0U);
ib               2696 drivers/net/ethernet/amd/pcnet32.c 		ib->filter[1] = cpu_to_le32(~0U);
ib               2704 drivers/net/ethernet/amd/pcnet32.c 	ib->filter[0] = 0;
ib               2705 drivers/net/ethernet/amd/pcnet32.c 	ib->filter[1] = 0;
ib                324 drivers/net/ethernet/amd/sunlance.c 	struct lance_init_block *ib = lp->init_block_mem;
ib                337 drivers/net/ethernet/amd/sunlance.c 	ib->phys_addr [0] = dev->dev_addr [1];
ib                338 drivers/net/ethernet/amd/sunlance.c 	ib->phys_addr [1] = dev->dev_addr [0];
ib                339 drivers/net/ethernet/amd/sunlance.c 	ib->phys_addr [2] = dev->dev_addr [3];
ib                340 drivers/net/ethernet/amd/sunlance.c 	ib->phys_addr [3] = dev->dev_addr [2];
ib                341 drivers/net/ethernet/amd/sunlance.c 	ib->phys_addr [4] = dev->dev_addr [5];
ib                342 drivers/net/ethernet/amd/sunlance.c 	ib->phys_addr [5] = dev->dev_addr [4];
ib                347 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [i].tmd0      = leptr;
ib                348 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [i].tmd1_hadr = leptr >> 16;
ib                349 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [i].tmd1_bits = 0;
ib                350 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [i].length    = 0xf000; /* The ones required by tmd2 */
ib                351 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [i].misc      = 0;
ib                358 drivers/net/ethernet/amd/sunlance.c 		ib->brx_ring [i].rmd0      = leptr;
ib                359 drivers/net/ethernet/amd/sunlance.c 		ib->brx_ring [i].rmd1_hadr = leptr >> 16;
ib                360 drivers/net/ethernet/amd/sunlance.c 		ib->brx_ring [i].rmd1_bits = LE_R1_OWN;
ib                361 drivers/net/ethernet/amd/sunlance.c 		ib->brx_ring [i].length    = -RX_BUFF_SIZE | 0xf000;
ib                362 drivers/net/ethernet/amd/sunlance.c 		ib->brx_ring [i].mblength  = 0;
ib                369 drivers/net/ethernet/amd/sunlance.c 	ib->rx_len = (LANCE_LOG_RX_BUFFERS << 13) | (leptr >> 16);
ib                370 drivers/net/ethernet/amd/sunlance.c 	ib->rx_ptr = leptr;
ib                374 drivers/net/ethernet/amd/sunlance.c 	ib->tx_len = (LANCE_LOG_TX_BUFFERS << 13) | (leptr >> 16);
ib                375 drivers/net/ethernet/amd/sunlance.c 	ib->tx_ptr = leptr;
ib                381 drivers/net/ethernet/amd/sunlance.c 	struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib                393 drivers/net/ethernet/amd/sunlance.c 	sbus_writeb(dev->dev_addr[1], &ib->phys_addr[0]);
ib                394 drivers/net/ethernet/amd/sunlance.c 	sbus_writeb(dev->dev_addr[0], &ib->phys_addr[1]);
ib                395 drivers/net/ethernet/amd/sunlance.c 	sbus_writeb(dev->dev_addr[3], &ib->phys_addr[2]);
ib                396 drivers/net/ethernet/amd/sunlance.c 	sbus_writeb(dev->dev_addr[2], &ib->phys_addr[3]);
ib                397 drivers/net/ethernet/amd/sunlance.c 	sbus_writeb(dev->dev_addr[5], &ib->phys_addr[4]);
ib                398 drivers/net/ethernet/amd/sunlance.c 	sbus_writeb(dev->dev_addr[4], &ib->phys_addr[5]);
ib                403 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(leptr,	&ib->btx_ring [i].tmd0);
ib                404 drivers/net/ethernet/amd/sunlance.c 		sbus_writeb(leptr >> 16,&ib->btx_ring [i].tmd1_hadr);
ib                405 drivers/net/ethernet/amd/sunlance.c 		sbus_writeb(0,		&ib->btx_ring [i].tmd1_bits);
ib                408 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(0xf000,	&ib->btx_ring [i].length);
ib                409 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(0,		&ib->btx_ring [i].misc);
ib                416 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(leptr,	&ib->brx_ring [i].rmd0);
ib                417 drivers/net/ethernet/amd/sunlance.c 		sbus_writeb(leptr >> 16,&ib->brx_ring [i].rmd1_hadr);
ib                418 drivers/net/ethernet/amd/sunlance.c 		sbus_writeb(LE_R1_OWN,	&ib->brx_ring [i].rmd1_bits);
ib                420 drivers/net/ethernet/amd/sunlance.c 			    &ib->brx_ring [i].length);
ib                421 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(0,		&ib->brx_ring [i].mblength);
ib                429 drivers/net/ethernet/amd/sunlance.c 		    &ib->rx_len);
ib                430 drivers/net/ethernet/amd/sunlance.c 	sbus_writew(leptr, &ib->rx_ptr);
ib                435 drivers/net/ethernet/amd/sunlance.c 		    &ib->tx_len);
ib                436 drivers/net/ethernet/amd/sunlance.c 	sbus_writew(leptr, &ib->tx_ptr);
ib                511 drivers/net/ethernet/amd/sunlance.c 	struct lance_init_block *ib = lp->init_block_mem;
ib                517 drivers/net/ethernet/amd/sunlance.c 	for (rd = &ib->brx_ring [entry];
ib                519 drivers/net/ethernet/amd/sunlance.c 	     rd = &ib->brx_ring [entry]) {
ib                551 drivers/net/ethernet/amd/sunlance.c 					 (unsigned char *)&(ib->rx_buf [entry][0]),
ib                570 drivers/net/ethernet/amd/sunlance.c 	struct lance_init_block *ib = lp->init_block_mem;
ib                577 drivers/net/ethernet/amd/sunlance.c 		struct lance_tx_desc *td = &ib->btx_ring [i];
ib                680 drivers/net/ethernet/amd/sunlance.c 	struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib                687 drivers/net/ethernet/amd/sunlance.c 	for (rd = &ib->brx_ring [entry];
ib                689 drivers/net/ethernet/amd/sunlance.c 	     rd = &ib->brx_ring [entry]) {
ib                720 drivers/net/ethernet/amd/sunlance.c 			lance_piocopy_to_skb(skb, &(ib->rx_buf[entry][0]), len);
ib                738 drivers/net/ethernet/amd/sunlance.c 	struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib                745 drivers/net/ethernet/amd/sunlance.c 		struct lance_tx_desc __iomem *td = &ib->btx_ring [i];
ib                886 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib                887 drivers/net/ethernet/amd/sunlance.c 		u16 __iomem *packet = (u16 __iomem *) &(ib->tx_buf[entry][0]);
ib                895 drivers/net/ethernet/amd/sunlance.c 		sbus_writew((-ETH_ZLEN) | 0xf000, &ib->btx_ring[entry].length);
ib                896 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(0, &ib->btx_ring[entry].misc);
ib                897 drivers/net/ethernet/amd/sunlance.c 		sbus_writeb(LE_T1_POK|LE_T1_OWN, &ib->btx_ring[entry].tmd1_bits);
ib                899 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block *ib = lp->init_block_mem;
ib                900 drivers/net/ethernet/amd/sunlance.c 		u16 *packet = (u16 *) &(ib->tx_buf[entry][0]);
ib                907 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring[entry].length = (-ETH_ZLEN) | 0xf000;
ib                908 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring[entry].misc = 0;
ib                909 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring[entry].tmd1_bits = (LE_T1_POK|LE_T1_OWN);
ib                941 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib                942 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(0, &ib->mode);
ib                943 drivers/net/ethernet/amd/sunlance.c 		sbus_writel(0, &ib->filter[0]);
ib                944 drivers/net/ethernet/amd/sunlance.c 		sbus_writel(0, &ib->filter[1]);
ib                946 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block *ib = lp->init_block_mem;
ib                947 drivers/net/ethernet/amd/sunlance.c 		ib->mode = 0;
ib                948 drivers/net/ethernet/amd/sunlance.c 		ib->filter [0] = 0;
ib                949 drivers/net/ethernet/amd/sunlance.c 		ib->filter [1] = 0;
ib               1125 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib               1126 drivers/net/ethernet/amd/sunlance.c 		sbus_writew((-len) | 0xf000, &ib->btx_ring[entry].length);
ib               1127 drivers/net/ethernet/amd/sunlance.c 		sbus_writew(0, &ib->btx_ring[entry].misc);
ib               1128 drivers/net/ethernet/amd/sunlance.c 		lance_piocopy_from_skb(&ib->tx_buf[entry][0], skb->data, skblen);
ib               1130 drivers/net/ethernet/amd/sunlance.c 			lance_piozero(&ib->tx_buf[entry][skblen], len - skblen);
ib               1131 drivers/net/ethernet/amd/sunlance.c 		sbus_writeb(LE_T1_POK | LE_T1_OWN, &ib->btx_ring[entry].tmd1_bits);
ib               1133 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block *ib = lp->init_block_mem;
ib               1134 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [entry].length = (-len) | 0xf000;
ib               1135 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [entry].misc = 0;
ib               1136 drivers/net/ethernet/amd/sunlance.c 		skb_copy_from_linear_data(skb, &ib->tx_buf [entry][0], skblen);
ib               1138 drivers/net/ethernet/amd/sunlance.c 			memset((char *) &ib->tx_buf [entry][skblen], 0, len - skblen);
ib               1139 drivers/net/ethernet/amd/sunlance.c 		ib->btx_ring [entry].tmd1_bits = (LE_T1_POK | LE_T1_OWN);
ib               1178 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib               1179 drivers/net/ethernet/amd/sunlance.c 		sbus_writel(val, &ib->filter[0]);
ib               1180 drivers/net/ethernet/amd/sunlance.c 		sbus_writel(val, &ib->filter[1]);
ib               1182 drivers/net/ethernet/amd/sunlance.c 		struct lance_init_block *ib = lp->init_block_mem;
ib               1183 drivers/net/ethernet/amd/sunlance.c 		ib->filter [0] = val;
ib               1184 drivers/net/ethernet/amd/sunlance.c 		ib->filter [1] = val;
ib               1195 drivers/net/ethernet/amd/sunlance.c 			struct lance_init_block __iomem *ib = lp->init_block_iomem;
ib               1196 drivers/net/ethernet/amd/sunlance.c 			u16 __iomem *mcast_table = (u16 __iomem *) &ib->filter;
ib               1201 drivers/net/ethernet/amd/sunlance.c 			struct lance_init_block *ib = lp->init_block_mem;
ib               1202 drivers/net/ethernet/amd/sunlance.c 			u16 *mcast_table = (u16 *) &ib->filter;
ib                423 drivers/net/ethernet/brocade/bna/bfi_enet.h 		struct bfi_enet_ib	ib;
ib                496 drivers/net/ethernet/brocade/bna/bfi_enet.h 		struct bfi_enet_ib	ib;
ib                236 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 	struct bna_ib *ib = _ib;					\
ib                237 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 	if ((ib->intr_type == BNA_INTR_T_INTX)) {			\
ib                239 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 		intx_mask &= ~(ib->intr_vector);			\
ib                242 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 	bna_ib_coalescing_timer_set(&ib->door_bell,			\
ib                243 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 			ib->coalescing_timeo);				\
ib                245 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 		bna_ib_ack(&ib->door_bell, 0);				\
ib                251 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 	struct bna_ib *ib = _ib;					\
ib                253 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 		ib->door_bell.doorbell_addr);				\
ib                254 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 	if (ib->intr_type == BNA_INTR_T_INTX) {				\
ib                256 drivers/net/ethernet/brocade/bna/bna_hw_defs.h 		intx_mask |= ib->intr_vector;				\
ib                 16 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ib_coalescing_timeo_set(struct bna_ib *ib, u8 coalescing_timeo)
ib                 18 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	ib->coalescing_timeo = coalescing_timeo;
ib                 19 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	ib->door_bell.doorbell_ack = BNA_DOORBELL_IB_INT_ACK(
ib                 20 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 				(u32)ib->coalescing_timeo, 0);
ib               1475 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		bna_ib_start(rx->bna, &rxp->cq.ib, is_regular);
ib               1667 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		cfg_req->q_cfg[i].ib.index_addr.a32.addr_lo =
ib               1668 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			rxp->cq.ib.ib_seg_host_addr.lsb;
ib               1669 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		cfg_req->q_cfg[i].ib.index_addr.a32.addr_hi =
ib               1670 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			rxp->cq.ib.ib_seg_host_addr.msb;
ib               1671 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		cfg_req->q_cfg[i].ib.intr.msix_index =
ib               1672 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			htons((u16)rxp->cq.ib.intr_vector);
ib               1679 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	cfg_req->ib_cfg.msix = (rxp->cq.ib.intr_type == BNA_INTR_T_MSIX)
ib               1683 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			htonl((u32)rxp->cq.ib.coalescing_timeo);
ib               1685 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			htonl((u32)rxp->cq.ib.interpkt_timeo);
ib               1686 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	cfg_req->ib_cfg.inter_pkt_count = (u8)rxp->cq.ib.interpkt_count;
ib               1735 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		bna_ib_stop(rx->bna, &rxp->cq.ib);
ib               2360 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.ib_seg_host_addr.lsb =
ib               2362 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.ib_seg_host_addr.msb =
ib               2364 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.ib_seg_host_addr_kva =
ib               2366 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.intr_type = intr_info->intr_type;
ib               2368 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			rxp->cq.ib.intr_vector = rxp->vector;
ib               2370 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			rxp->cq.ib.intr_vector = BIT(rxp->vector);
ib               2371 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.coalescing_timeo = rx_cfg->coalescing_timeo;
ib               2372 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.interpkt_count = BFI_RX_INTERPKT_COUNT;
ib               2373 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ib.interpkt_timeo = BFI_RX_INTERPKT_TIMEO;
ib               2453 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			(u32 *)rxp->cq.ib.ib_seg_host_addr_kva;
ib               2454 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ccb->i_dbell = &rxp->cq.ib.door_bell;
ib               2455 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ccb->intr_type = rxp->cq.ib.intr_type;
ib               2456 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		rxp->cq.ccb->intr_vector = rxp->cq.ib.intr_vector;
ib               2458 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			rxp->cq.ib.coalescing_timeo;
ib               2680 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		bna_ib_coalescing_timeo_set(&rxp->cq.ib, coalescing_timeo);
ib               2742 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	bna_ib_coalescing_timeo_set(&ccb->cq->ib, coalescing_timeo);
ib               2876 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		bna_ib_start(tx->bna, &txq->ib, is_regular);
ib               3104 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		cfg_req->q_cfg[i].ib.index_addr.a32.addr_lo =
ib               3105 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			txq->ib.ib_seg_host_addr.lsb;
ib               3106 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		cfg_req->q_cfg[i].ib.index_addr.a32.addr_hi =
ib               3107 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			txq->ib.ib_seg_host_addr.msb;
ib               3108 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		cfg_req->q_cfg[i].ib.intr.msix_index =
ib               3109 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			htons((u16)txq->ib.intr_vector);
ib               3116 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	cfg_req->ib_cfg.msix = (txq->ib.intr_type == BNA_INTR_T_MSIX)
ib               3119 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			htonl((u32)txq->ib.coalescing_timeo);
ib               3121 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			htonl((u32)txq->ib.interpkt_timeo);
ib               3122 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	cfg_req->ib_cfg.inter_pkt_count = (u8)txq->ib.interpkt_count;
ib               3155 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		bna_ib_stop(tx->bna, &txq->ib);
ib               3448 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.ib_seg_host_addr.lsb =
ib               3450 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.ib_seg_host_addr.msb =
ib               3452 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.ib_seg_host_addr_kva =
ib               3454 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.intr_type = intr_info->intr_type;
ib               3455 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.intr_vector = (intr_info->num == 1) ?
ib               3459 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			txq->ib.intr_vector = BIT(txq->ib.intr_vector);
ib               3460 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.coalescing_timeo = tx_cfg->coalescing_timeo;
ib               3461 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.interpkt_timeo = BFI_TX_INTERPKT_TIMEO;
ib               3462 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->ib.interpkt_count = BFI_TX_INTERPKT_COUNT;
ib               3470 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 			(u32 *)txq->ib.ib_seg_host_addr_kva;
ib               3471 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->tcb->i_dbell = &txq->ib.door_bell;
ib               3472 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->tcb->intr_type = txq->ib.intr_type;
ib               3473 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		txq->tcb->intr_vector = txq->ib.intr_vector;
ib               3666 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 		bna_ib_coalescing_timeo_set(&txq->ib, coalescing_timeo);
ib                446 drivers/net/ethernet/brocade/bna/bna_types.h 	struct bna_ib ib;
ib                638 drivers/net/ethernet/brocade/bna/bna_types.h 	struct bna_ib ib;
ib                220 drivers/net/ethernet/chelsio/cxgb/vsc7326.c static void run_table(adapter_t *adapter, struct init_table *ib, int len)
ib                225 drivers/net/ethernet/chelsio/cxgb/vsc7326.c 		if (ib[i].addr == INITBLOCK_SLEEP) {
ib                226 drivers/net/ethernet/chelsio/cxgb/vsc7326.c 			udelay( ib[i].data );
ib                227 drivers/net/ethernet/chelsio/cxgb/vsc7326.c 			pr_err("sleep %d us\n",ib[i].data);
ib                229 drivers/net/ethernet/chelsio/cxgb/vsc7326.c 			vsc_write( adapter, ib[i].addr, ib[i].data );
ib               1861 drivers/net/ethernet/dec/tulip/de2104x.c 		struct de_srom_media_block *ib = bufp;
ib               1865 drivers/net/ethernet/dec/tulip/de2104x.c 		switch(ib->opts & MediaBlockMask) {
ib               1897 drivers/net/ethernet/dec/tulip/de2104x.c 		bufp += sizeof (ib->opts);
ib               1899 drivers/net/ethernet/dec/tulip/de2104x.c 		if (ib->opts & MediaCustomCSRs) {
ib               1900 drivers/net/ethernet/dec/tulip/de2104x.c 			de->media[idx].csr13 = get_unaligned(&ib->csr13);
ib               1901 drivers/net/ethernet/dec/tulip/de2104x.c 			de->media[idx].csr14 = get_unaligned(&ib->csr14);
ib               1902 drivers/net/ethernet/dec/tulip/de2104x.c 			de->media[idx].csr15 = get_unaligned(&ib->csr15);
ib               1903 drivers/net/ethernet/dec/tulip/de2104x.c 			bufp += sizeof(ib->csr13) + sizeof(ib->csr14) +
ib               1904 drivers/net/ethernet/dec/tulip/de2104x.c 				sizeof(ib->csr15);
ib                862 drivers/net/ethernet/mellanox/mlx4/mcg.c 		rule_hw->ib.l3_qpn = spec->ib.l3_qpn;
ib                863 drivers/net/ethernet/mellanox/mlx4/mcg.c 		rule_hw->ib.qpn_mask = spec->ib.qpn_msk;
ib                864 drivers/net/ethernet/mellanox/mlx4/mcg.c 		memcpy(&rule_hw->ib.dst_gid, &spec->ib.dst_gid, 16);
ib                865 drivers/net/ethernet/mellanox/mlx4/mcg.c 		memcpy(&rule_hw->ib.dst_gid_msk, &spec->ib.dst_gid_msk, 16);
ib                953 drivers/net/ethernet/mellanox/mlx4/mcg.c 					"dst-gid = %pI6\n", cur->ib.dst_gid);
ib                956 drivers/net/ethernet/mellanox/mlx4/mcg.c 					cur->ib.dst_gid_msk);
ib               1396 drivers/net/ethernet/mellanox/mlx4/mcg.c 			memcpy(spec.ib.dst_gid, gid, 16);
ib               1397 drivers/net/ethernet/mellanox/mlx4/mcg.c 			memset(&spec.ib.dst_gid_msk, 0xff, 16);
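
The mcg.c lines copy a 16-byte GID into the IB arm of a flow-spec union and set the matching mask to all ones so the rule matches that GID exactly, and the earlier lines copy the same fields into the firmware layout. A small sketch of the value-plus-all-ones-mask idiom, using a hypothetical struct rather than the driver's mlx4_spec_ib:

#include <linux/string.h>
#include <linux/types.h>

struct my_ib_spec {
	u8 dst_gid[16];
	u8 dst_gid_msk[16];
};

/* Match exactly one destination GID: copy the value, set every mask bit. */
static void my_spec_match_gid(struct my_ib_spec *spec, const u8 gid[16])
{
	memcpy(spec->dst_gid, gid, 16);
	memset(spec->dst_gid_msk, 0xff, sizeof(spec->dst_gid_msk));
}
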
ib               1129 drivers/pci/controller/pcie-iproc.c 	struct iproc_pcie_ib *ib = &pcie->ib;
ib               1136 drivers/pci/controller/pcie-iproc.c 	for (region_idx = 0; region_idx < ib->nr_regions; region_idx++) {
ib               1446 drivers/pci/controller/pcie-iproc.c 		pcie->ib.nr_regions = ARRAY_SIZE(paxb_v2_ib_map);
ib                102 drivers/pci/controller/pcie-iproc.h 	struct iproc_pcie_ib ib;
ib                140 drivers/pinctrl/meson/pinctrl-meson.h #define BANK_DS(n, f, l, fi, li, per, peb, pr, pb, dr, db, or, ob, ir, ib,     \
ib                153 drivers/pinctrl/meson/pinctrl-meson.h 			[REG_IN]	= { ir, ib },			\
ib                158 drivers/pinctrl/meson/pinctrl-meson.h #define BANK(n, f, l, fi, li, per, peb, pr, pb, dr, db, or, ob, ir, ib) \
ib                159 drivers/pinctrl/meson/pinctrl-meson.h 	BANK_DS(n, f, l, fi, li, per, peb, pr, pb, dr, db, or, ob, ir, ib, 0, 0)
ib                247 drivers/s390/char/fs3270.c 	struct idal_buffer *ib;
ib                255 drivers/s390/char/fs3270.c 	ib = idal_buffer_alloc(count, 0);
ib                256 drivers/s390/char/fs3270.c 	if (IS_ERR(ib))
ib                263 drivers/s390/char/fs3270.c 		raw3270_request_set_idal(rq, ib);
ib                270 drivers/s390/char/fs3270.c 				if (idal_buffer_to_user(ib, data, count) != 0)
ib                280 drivers/s390/char/fs3270.c 	idal_buffer_free(ib);
ib                292 drivers/s390/char/fs3270.c 	struct idal_buffer *ib;
ib                299 drivers/s390/char/fs3270.c 	ib = idal_buffer_alloc(count, 0);
ib                300 drivers/s390/char/fs3270.c 	if (IS_ERR(ib))
ib                304 drivers/s390/char/fs3270.c 		if (idal_buffer_from_user(ib, data, count) == 0) {
ib                309 drivers/s390/char/fs3270.c 			raw3270_request_set_idal(rq, ib);
ib                318 drivers/s390/char/fs3270.c 	idal_buffer_free(ib);
ib                433 drivers/s390/char/fs3270.c 	struct idal_buffer *ib;
ib                474 drivers/s390/char/fs3270.c 	ib = idal_buffer_alloc(2*fp->view.rows*fp->view.cols + 5, 0);
ib                475 drivers/s390/char/fs3270.c 	if (IS_ERR(ib)) {
ib                478 drivers/s390/char/fs3270.c 		rc = PTR_ERR(ib);
ib                481 drivers/s390/char/fs3270.c 	fp->rdbuf = ib;
ib                227 drivers/s390/char/raw3270.c raw3270_request_set_idal(struct raw3270_request *rq, struct idal_buffer *ib)
ib                229 drivers/s390/char/raw3270.c 	rq->ccw.cda = __pa(ib->data);
ib                230 drivers/s390/char/raw3270.c 	rq->ccw.count = ib->size;
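
Taken together, the fs3270.c and raw3270.c lines show the usual life cycle of an idal_buffer: allocate one sized for the transfer, copy user data in (or out), hang it off a channel request with raw3270_request_set_idal(), and free it once the I/O is done. A condensed sketch of the user-to-buffer half of that path, using only the helpers visible above (the request build/start logic around it is omitted):

#include <linux/err.h>
#include <linux/uaccess.h>
#include <asm/idals.h>

/* Copy 'count' bytes from user space into a freshly allocated idal_buffer.
 * Returns the buffer or an ERR_PTR.  A real driver would then attach it to
 * a channel request with raw3270_request_set_idal() and call
 * idal_buffer_free() after the request completes.
 */
static struct idal_buffer *my_idal_from_user(const void __user *data, size_t count)
{
	struct idal_buffer *ib;

	ib = idal_buffer_alloc(count, 0);	/* page_order 0, as in fs3270 */
	if (IS_ERR(ib))
		return ib;

	if (idal_buffer_from_user(ib, data, count) != 0) {
		idal_buffer_free(ib);
		return ERR_PTR(-EFAULT);
	}
	return ib;
}
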
ib                 78 drivers/s390/char/sclp_early.c 		sclp.has_ib = cpue->ib;
ib                102 drivers/staging/isdn/hysdn/hysdn_proclog.c 	struct log_data *ib;
ib                115 drivers/staging/isdn/hysdn/hysdn_proclog.c 	if (!(ib = kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC)))
ib                117 drivers/staging/isdn/hysdn/hysdn_proclog.c 	strcpy(ib->log_start, cp);	/* set output string */
ib                118 drivers/staging/isdn/hysdn/hysdn_proclog.c 	ib->next = NULL;
ib                119 drivers/staging/isdn/hysdn/hysdn_proclog.c 	ib->proc_ctrl = pd;	/* point to own control structure */
ib                121 drivers/staging/isdn/hysdn/hysdn_proclog.c 	ib->usage_cnt = pd->if_used;
ib                123 drivers/staging/isdn/hysdn/hysdn_proclog.c 		pd->log_head = ib;	/* new head */
ib                125 drivers/staging/isdn/hysdn/hysdn_proclog.c 		pd->log_tail->next = ib;	/* follows existing messages */
ib                126 drivers/staging/isdn/hysdn/hysdn_proclog.c 	pd->log_tail = ib;	/* new tail */
ib                132 drivers/staging/isdn/hysdn/hysdn_proclog.c 			ib = pd->log_head;
ib                134 drivers/staging/isdn/hysdn/hysdn_proclog.c 			kfree(ib);
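
The hysdn_proclog.c lines append a freshly allocated log entry to a singly linked list by updating the tail pointer (or the head when the list is still empty), and later free entries from the head. A generic sketch of that head/tail append with hypothetical types; the driver's locking and reader usage counting are left out.

#include <linux/slab.h>
#include <linux/string.h>
#include <linux/errno.h>

struct my_log_entry {
	struct my_log_entry *next;
	char text[];			/* flexible array, like log_start above */
};

struct my_log_list {
	struct my_log_entry *head;
	struct my_log_entry *tail;
};

/* Append one entry at the tail; O(1) because the tail pointer is kept. */
static int my_log_append(struct my_log_list *l, const char *msg)
{
	struct my_log_entry *e;

	e = kmalloc(sizeof(*e) + strlen(msg) + 1, GFP_ATOMIC);
	if (!e)
		return -ENOMEM;

	strcpy(e->text, msg);
	e->next = NULL;

	if (!l->head)
		l->head = e;		/* first entry becomes the head */
	else
		l->tail->next = e;	/* otherwise link after the old tail */
	l->tail = e;
	return 0;
}
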
ib                 99 drivers/thermal/tegra/tegra-bpmp-thermal.c 	req = (struct mrq_thermal_bpmp_to_host_request *)ch->ib->data;
ib                833 drivers/video/fbdev/matrox/matroxfb_maven.c 			unsigned int ib;
ib                870 drivers/video/fbdev/matrox/matroxfb_maven.c 				ib = ((0x3C0000 * i - 0x8000)/ hdec + 0x05E7) >> 8;
ib                872 drivers/video/fbdev/matrox/matroxfb_maven.c 			} while (ib < ibmin);
ib                873 drivers/video/fbdev/matrox/matroxfb_maven.c 			if (ib >= m->htotal + 2) {
ib                874 drivers/video/fbdev/matrox/matroxfb_maven.c 				ib = ibmin;
ib                880 drivers/video/fbdev/matrox/matroxfb_maven.c 			m->regs[0x9E] = ib;
ib                881 drivers/video/fbdev/matrox/matroxfb_maven.c 			m->regs[0x9F] = ib >> 8;
ib                358 fs/xfs/xfs_aops.c 	struct xfs_ioend	*ib;
ib                361 fs/xfs/xfs_aops.c 	ib = container_of(b, struct xfs_ioend, io_list);
ib                362 fs/xfs/xfs_aops.c 	if (ia->io_offset < ib->io_offset)
ib                364 fs/xfs/xfs_aops.c 	else if (ia->io_offset > ib->io_offset)
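
The xfs_aops.c lines are the body of a comparison callback for list_sort(): both list_head pointers are mapped back to their containing ioends with container_of() and ordered by io_offset. A minimal sketch of the same shape for a hypothetical structure, keeping the negative/zero/positive return convention (the exact callback signature varies across kernel versions):

#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/list_sort.h>

struct my_extent {
	struct list_head io_list;
	u64 io_offset;
};

/* list_sort() comparison: order extents by starting offset. */
static int my_extent_cmp(void *priv, struct list_head *a, struct list_head *b)
{
	struct my_extent *ea = container_of(a, struct my_extent, io_list);
	struct my_extent *eb = container_of(b, struct my_extent, io_list);

	if (ea->io_offset < eb->io_offset)
		return -1;
	if (ea->io_offset > eb->io_offset)
		return 1;
	return 0;
}

/* Usage: list_sort(NULL, &pending_list, my_extent_cmp); */
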
ib                818 include/linux/mlx4/device.h 	struct mlx4_av		ib;
ib               1254 include/linux/mlx4/device.h 		struct mlx4_spec_ib ib;
ib               1361 include/linux/mlx4/device.h 		struct mlx4_net_trans_rule_hw_ib ib;
ib                 93 include/net/inet_hashtables.h static inline struct net *ib_net(struct inet_bind_bucket *ib)
ib                 95 include/net/inet_hashtables.h 	return read_pnet(&ib->ib_net);
ib                211 include/rdma/ib_sa.h 		struct sa_path_rec_ib ib;
ib                244 include/rdma/ib_sa.h static inline void path_conv_opa_to_ib(struct sa_path_rec *ib,
ib                252 include/rdma/ib_sa.h 		ib->dgid.global.interface_id
ib                254 include/rdma/ib_sa.h 		ib->dgid.global.subnet_prefix
ib                256 include/rdma/ib_sa.h 		ib->sgid.global.interface_id
ib                258 include/rdma/ib_sa.h 		ib->dgid.global.subnet_prefix
ib                260 include/rdma/ib_sa.h 		ib->ib.dlid	= 0;
ib                262 include/rdma/ib_sa.h 		ib->ib.slid	= 0;
ib                264 include/rdma/ib_sa.h 		ib->ib.dlid	= htons(ntohl(opa->opa.dlid));
ib                265 include/rdma/ib_sa.h 		ib->ib.slid	= htons(ntohl(opa->opa.slid));
ib                267 include/rdma/ib_sa.h 	ib->service_id		= opa->service_id;
ib                268 include/rdma/ib_sa.h 	ib->ib.raw_traffic	= opa->opa.raw_traffic;
ib                272 include/rdma/ib_sa.h 				       struct sa_path_rec *ib)
ib                276 include/rdma/ib_sa.h 	if ((ib_is_opa_gid(&ib->sgid)) ||
ib                277 include/rdma/ib_sa.h 	    (ib_is_opa_gid(&ib->dgid))) {
ib                278 include/rdma/ib_sa.h 		slid = htonl(opa_get_lid_from_gid(&ib->sgid));
ib                279 include/rdma/ib_sa.h 		dlid = htonl(opa_get_lid_from_gid(&ib->dgid));
ib                281 include/rdma/ib_sa.h 		slid = htonl(ntohs(ib->ib.slid));
ib                282 include/rdma/ib_sa.h 		dlid = htonl(ntohs(ib->ib.dlid));
ib                286 include/rdma/ib_sa.h 	opa->service_id		= ib->service_id;
ib                287 include/rdma/ib_sa.h 	opa->opa.raw_traffic	= ib->ib.raw_traffic;
ib                595 include/rdma/ib_sa.h 		rec->ib.slid = cpu_to_be16(slid);
ib                603 include/rdma/ib_sa.h 		rec->ib.dlid = cpu_to_be16(dlid);
ib                612 include/rdma/ib_sa.h 		rec->ib.raw_traffic = raw_traffic;
ib                620 include/rdma/ib_sa.h 		return htonl(ntohs(rec->ib.slid));
ib                629 include/rdma/ib_sa.h 		return htonl(ntohs(rec->ib.dlid));
ib                638 include/rdma/ib_sa.h 		return rec->ib.raw_traffic;
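
In the ib_sa.h lines above, IB LIDs are stored as 16-bit big-endian values while the generic path-record accessors expose 32-bit big-endian ones, so getters widen with htonl(ntohs(...)) and the OPA-to-IB conversion narrows with htons(ntohl(...)). A tiny sketch of just those two conversions, with hypothetical helper names:

#include <linux/types.h>
#include <asm/byteorder.h>

/* Widen a 16-bit big-endian LID to the 32-bit big-endian form returned
 * by the generic accessors. */
static inline __be32 my_lid16_to_lid32(__be16 lid)
{
	return cpu_to_be32(be16_to_cpu(lid));
}

/* Narrow a 32-bit big-endian LID back to 16 bits; only valid when the
 * value actually fits, i.e. for plain IB LIDs. */
static inline __be16 my_lid32_to_lid16(__be32 lid)
{
	return cpu_to_be16(be32_to_cpu(lid));
}
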
ib                906 include/rdma/ib_verbs.h 		struct ib_ah_attr ib;
ib               2035 include/rdma/ib_verbs.h 	struct ib_flow_spec_ib		ib;
ib               4371 include/rdma/ib_verbs.h 		attr->ib.dlid = (u16)dlid;
ib               4379 include/rdma/ib_verbs.h 		return attr->ib.dlid;
ib               4399 include/rdma/ib_verbs.h 		attr->ib.src_path_bits = src_path_bits;
ib               4407 include/rdma/ib_verbs.h 		return attr->ib.src_path_bits;
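
The ib_verbs.h lines are the IB-specific arms of accessors that pick a union member based on the address-handle type. A hedged sketch of that tagged-union accessor shape with hypothetical types (the real rdma_ah_attr covers more transports and fields):

#include <linux/types.h>

enum my_ah_type { MY_AH_IB, MY_AH_ROCE };

struct my_ah_attr {
	enum my_ah_type type;
	union {
		struct { u16 dlid; u8 src_path_bits; } ib;
		struct { u8 dmac[6]; } roce;
	};
};

/* Setter/getter only touch the union member selected by ->type. */
static inline void my_ah_set_dlid(struct my_ah_attr *attr, u32 dlid)
{
	if (attr->type == MY_AH_IB)
		attr->ib.dlid = (u16)dlid;
}

static inline u32 my_ah_get_dlid(const struct my_ah_attr *attr)
{
	return attr->type == MY_AH_IB ? attr->ib.dlid : 0;
}
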
ib                 41 include/soc/tegra/bpmp.h 	struct tegra_bpmp_mb_data *ib;
ib               1416 net/sched/sch_cake.c 		u32 ib = cake_heap_get_backlog(q, i);
ib               1419 net/sched/sch_cake.c 		if (ib > pb) {
ib                669 tools/perf/util/annotate.c 	const struct ins *ib = b;
ib                671 tools/perf/util/annotate.c 	return strcmp(ia->name, ib->name);
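
The final perf lines are a comparator that orders struct ins entries by name with strcmp(), which is what lets the instruction table be binary-searched once it is sorted. A small self-contained user-space sketch of the same comparator driving qsort() and bsearch(), with a made-up table:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct my_ins {
	const char *name;
};

/* qsort()/bsearch() comparator: order entries by ->name. */
static int my_ins_cmp(const void *a, const void *b)
{
	const struct my_ins *ia = a;
	const struct my_ins *ib = b;

	return strcmp(ia->name, ib->name);
}

int main(void)
{
	struct my_ins table[] = { { "mov" }, { "call" }, { "jmp" } };
	struct my_ins key = { "jmp" }, *found;

	qsort(table, 3, sizeof(table[0]), my_ins_cmp);
	found = bsearch(&key, table, 3, sizeof(table[0]), my_ins_cmp);
	printf("%s\n", found ? found->name : "not found");
	return 0;
}
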