/linux-4.4.14/drivers/gpu/drm/radeon/ |
D | radeon_ib.c |
    56  struct radeon_ib *ib, struct radeon_vm *vm,  in radeon_ib_get() argument
    61  r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256);  in radeon_ib_get()
    67  radeon_sync_create(&ib->sync);  in radeon_ib_get()
    69  ib->ring = ring;  in radeon_ib_get()
    70  ib->fence = NULL;  in radeon_ib_get()
    71  ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);  in radeon_ib_get()
    72  ib->vm = vm;  in radeon_ib_get()
    77  ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET;  in radeon_ib_get()
    79  ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo);  in radeon_ib_get()
    81  ib->is_const_ib = false;  in radeon_ib_get()
    [all …]
|
D | si_dma.c |
    70  struct radeon_ib *ib,  in si_dma_vm_copy_pages() argument
    79  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in si_dma_vm_copy_pages()
    81  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pages()
    82  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pages()
    83  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
    84  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pages()
   106  struct radeon_ib *ib,  in si_dma_vm_write_pages() argument
   120  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);  in si_dma_vm_write_pages()
   121  ib->ptr[ib->length_dw++] = pe;  in si_dma_vm_write_pages()
   122  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
    [all …]
|
D | ni_dma.c |
   123  struct radeon_ib *ib)  in cayman_dma_ring_ib_execute() argument
   125  struct radeon_ring *ring = &rdev->ring[ib->ring];  in cayman_dma_ring_ib_execute()
   126  unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;  in cayman_dma_ring_ib_execute()
   145  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in cayman_dma_ring_ib_execute()
   146  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
   316  struct radeon_ib *ib,  in cayman_dma_vm_copy_pages() argument
   327  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in cayman_dma_vm_copy_pages()
   329  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cayman_dma_vm_copy_pages()
   330  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cayman_dma_vm_copy_pages()
   331  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
    [all …]
|
D | radeon_vce.c |
   350  struct radeon_ib ib;  in radeon_vce_get_create_msg() local
   354  r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);  in radeon_vce_get_create_msg()
   360  dummy = ib.gpu_addr + 1024;  in radeon_vce_get_create_msg()
   363  ib.length_dw = 0;  in radeon_vce_get_create_msg()
   364  ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */  in radeon_vce_get_create_msg()
   365  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */  in radeon_vce_get_create_msg()
   366  ib.ptr[ib.length_dw++] = cpu_to_le32(handle);  in radeon_vce_get_create_msg()
   368  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */  in radeon_vce_get_create_msg()
   369  ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */  in radeon_vce_get_create_msg()
   370  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000);  in radeon_vce_get_create_msg()
    [all …]
|
D | cik_sdma.c |
   134  struct radeon_ib *ib)  in cik_sdma_ring_ib_execute() argument
   136  struct radeon_ring *ring = &rdev->ring[ib->ring];  in cik_sdma_ring_ib_execute()
   137  u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;  in cik_sdma_ring_ib_execute()
   155  radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */  in cik_sdma_ring_ib_execute()
   156  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));  in cik_sdma_ring_ib_execute()
   157  radeon_ring_write(ring, ib->length_dw);  in cik_sdma_ring_ib_execute()
   704  struct radeon_ib ib;  in cik_sdma_ib_test() local
   721  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  in cik_sdma_ib_test()
   727  ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);  in cik_sdma_ib_test()
   728  ib.ptr[1] = lower_32_bits(gpu_addr);  in cik_sdma_ib_test()
    [all …]
|
D | evergreen_cs.c |
   450  uint32_t *ib = p->ib.ptr;  in evergreen_cs_track_validate_cb() local
   472  ib[track->cb_color_slice_idx[id]] = slice;  in evergreen_cs_track_validate_cb()
  1098  u32 tmp, *ib;  in evergreen_cs_handle_reg() local
  1101  ib = p->ib.ptr;  in evergreen_cs_handle_reg()
  1149  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);  in evergreen_cs_handle_reg()
  1178  ib[idx] &= ~Z_ARRAY_MODE(0xf);  in evergreen_cs_handle_reg()
  1180  ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));  in evergreen_cs_handle_reg()
  1188  ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));  in evergreen_cs_handle_reg()
  1189  ib[idx] |= DB_TILE_SPLIT(tile_split) |  in evergreen_cs_handle_reg()
  1221  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);  in evergreen_cs_handle_reg()
    [all …]
|
D | radeon_vm.c |
   360  struct radeon_ib *ib,  in radeon_vm_set_pages() argument
   369  radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);  in radeon_vm_set_pages()
   372  radeon_asic_vm_write_pages(rdev, ib, pe, addr,  in radeon_vm_set_pages()
   376  radeon_asic_vm_set_pages(rdev, ib, pe, addr,  in radeon_vm_set_pages()
   390  struct radeon_ib ib;  in radeon_vm_clear_bo() local
   406  r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);  in radeon_vm_clear_bo()
   410  ib.length_dw = 0;  in radeon_vm_clear_bo()
   412  radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);  in radeon_vm_clear_bo()
   413  radeon_asic_vm_pad_ib(rdev, &ib);  in radeon_vm_clear_bo()
   414  WARN_ON(ib.length_dw > 64);  in radeon_vm_clear_bo()
    [all …]
|
D | r600_cs.c |
   356  volatile u32 *ib = p->ib.ptr;  in r600_cs_track_validate_cb() local
   467  ib[track->cb_color_size_idx[i]] = tmp;  in r600_cs_track_validate_cb()
   526  volatile u32 *ib = p->ib.ptr;  in r600_cs_track_validate_db() local
   564  ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);  in r600_cs_track_validate_db()
   834  volatile uint32_t *ib;  in r600_cs_common_vline_parse() local
   836  ib = p->ib.ptr;  in r600_cs_common_vline_parse()
   899  ib[h_idx + 2] = PACKET2(0);  in r600_cs_common_vline_parse()
   900  ib[h_idx + 3] = PACKET2(0);  in r600_cs_common_vline_parse()
   901  ib[h_idx + 4] = PACKET2(0);  in r600_cs_common_vline_parse()
   902  ib[h_idx + 5] = PACKET2(0);  in r600_cs_common_vline_parse()
    [all …]
|
D | r600_dma.c |
   339  struct radeon_ib ib;  in r600_dma_ib_test() local
   353  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  in r600_dma_ib_test()
   359  ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);  in r600_dma_ib_test()
   360  ib.ptr[1] = lower_32_bits(gpu_addr);  in r600_dma_ib_test()
   361  ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;  in r600_dma_ib_test()
   362  ib.ptr[3] = 0xDEADBEEF;  in r600_dma_ib_test()
   363  ib.length_dw = 4;  in r600_dma_ib_test()
   365  r = radeon_ib_schedule(rdev, &ib, NULL, false);  in r600_dma_ib_test()
   367  radeon_ib_free(rdev, &ib);  in r600_dma_ib_test()
   371  r = radeon_fence_wait(ib.fence, false);  in r600_dma_ib_test()
    [all …]
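Read together, the r600_dma.c hits above outline the driver's entire DMA IB self-test: take an IB from the suballocator, emit one DMA write of a magic value, schedule it, then wait on its fence. A minimal sketch assembling exactly those calls — the wrapper name is hypothetical, and the scratch buffer behind gpu_addr plus the readback check are assumed to be handled by the caller:

    /* Sketch only: the IB round trip visible in the r600_dma.c hits.
     * 'gpu_addr' is assumed to be a CPU-readable scratch location. */
    static int r600_dma_ib_roundtrip(struct radeon_device *rdev,
                                     struct radeon_ring *ring, u64 gpu_addr)
    {
        struct radeon_ib ib;
        int r;

        r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
        if (r)
            return r;

        ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1); /* one-dword payload */
        ib.ptr[1] = lower_32_bits(gpu_addr);
        ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
        ib.ptr[3] = 0xDEADBEEF;                            /* test pattern */
        ib.length_dw = 4;

        r = radeon_ib_schedule(rdev, &ib, NULL, false);
        if (r) {
            radeon_ib_free(rdev, &ib);
            return r;
        }
        r = radeon_fence_wait(ib.fence, false); /* block until the ring ran it */
        radeon_ib_free(rdev, &ib);
        return r;
    }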
|
D | evergreen_dma.c |
    68  struct radeon_ib *ib)  in evergreen_dma_ring_ib_execute() argument
    70  struct radeon_ring *ring = &rdev->ring[ib->ring];  in evergreen_dma_ring_ib_execute()
    89  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in evergreen_dma_ring_ib_execute()
    90  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()
|
D | r200.c |
   151  volatile uint32_t *ib;  in r200_packet0_check() local
   159  ib = p->ib.ptr;  in r200_packet0_check()
   191  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
   204  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
   228  ib[idx] = tmp + ((u32)reloc->gpu_offset);  in r200_packet0_check()
   230  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
   274  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
   300  ib[idx] = tmp;  in r200_packet0_check()
   302  ib[idx] = idx_value;  in r200_packet0_check()
   368  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
|
D | radeon_cs.c |
   177  p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm,  in radeon_cs_parser_relocs()
   242  r = radeon_sync_resv(p->rdev, &p->ib.sync, resv,  in radeon_cs_sync_rings()
   267  p->ib.sa_bo = NULL;  in radeon_cs_parser_init()
   415  &parser->ib.fence->base);  in radeon_cs_parser_fini()
   437  radeon_ib_free(parser->rdev, &parser->ib);  in radeon_cs_parser_fini()
   471  r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);  in radeon_cs_ib_chunk()
   517  radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update);  in radeon_bo_vm_update_pte()
   542  r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib);  in radeon_cs_ib_vm_chunk()
   565  r = radeon_ib_schedule(rdev, &parser->ib, &parser->const_ib, true);  in radeon_cs_ib_vm_chunk()
   567  r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);  in radeon_cs_ib_vm_chunk()
    [all …]
|
D | radeon_uvd.c |
   552  p->ib.ptr[data0] = start & 0xFFFFFFFF;  in radeon_uvd_cs_reloc()
   553  p->ib.ptr[data1] = start >> 32;  in radeon_uvd_cs_reloc()
   699  struct radeon_ib ib;  in radeon_uvd_send_msg() local
   702  r = radeon_ib_get(rdev, ring, &ib, NULL, 64);  in radeon_uvd_send_msg()
   706  ib.ptr[0] = PACKET0(UVD_GPCOM_VCPU_DATA0, 0);  in radeon_uvd_send_msg()
   707  ib.ptr[1] = addr;  in radeon_uvd_send_msg()
   708  ib.ptr[2] = PACKET0(UVD_GPCOM_VCPU_DATA1, 0);  in radeon_uvd_send_msg()
   709  ib.ptr[3] = addr >> 32;  in radeon_uvd_send_msg()
   710  ib.ptr[4] = PACKET0(UVD_GPCOM_VCPU_CMD, 0);  in radeon_uvd_send_msg()
   711  ib.ptr[5] = 0;  in radeon_uvd_send_msg()
    [all …]
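The radeon_uvd.c hit shows that a UVD command IB is little more than PACKET0 register writes handing the VCPU a message buffer address. A hedged sketch of the visible head of radeon_uvd_send_msg() — addr is the message buffer's GPU address, and the remaining dwords of the 64-byte IB are elided by the listing above:

    /* Sketch: only the six dwords shown in the hit; the rest of the
     * IB is truncated in the listing and not reproduced here. */
    ib.ptr[0] = PACKET0(UVD_GPCOM_VCPU_DATA0, 0);
    ib.ptr[1] = addr;        /* low 32 bits of the message buffer */
    ib.ptr[2] = PACKET0(UVD_GPCOM_VCPU_DATA1, 0);
    ib.ptr[3] = addr >> 32;  /* high 32 bits */
    ib.ptr[4] = PACKET0(UVD_GPCOM_VCPU_CMD, 0);
    ib.ptr[5] = 0;           /* command dword, 0 in this path */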
|
D | radeon_asic.h |
    95  void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
   334  void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
   344  void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
   518  void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
   545  struct radeon_ib *ib);
   610  void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
   616  int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
   617  int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
   619  struct radeon_ib *ib);
   624  struct radeon_ib *ib,
    [all …]
|
D | r100.c |
  1295  p->ib.ptr[idx] = (value & 0x3fc00000) | tmp;  in r100_reloc_pitch_offset()
  1297  p->ib.ptr[idx] = (value & 0xffc00000) | tmp;  in r100_reloc_pitch_offset()
  1309  volatile uint32_t *ib;  in r100_packet3_load_vbpntr() local
  1312  ib = p->ib.ptr;  in r100_packet3_load_vbpntr()
  1331  ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset);  in r100_packet3_load_vbpntr()
  1343  ib[idx+2] = radeon_get_ib_value(p, idx + 2) + ((u32)reloc->gpu_offset);  in r100_packet3_load_vbpntr()
  1357  ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset);  in r100_packet3_load_vbpntr()
  1432  volatile uint32_t *ib;  in r100_cs_packet_parse_vline() local
  1434  ib = p->ib.ptr;  in r100_cs_packet_parse_vline()
  1475  ib[h_idx + 2] = PACKET2(0);  in r100_cs_packet_parse_vline()
    [all …]
|
D | r300.c |
   634  volatile uint32_t *ib;  in r300_packet0_check() local
   640  ib = p->ib.ptr;  in r300_packet0_check()
   676  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r300_packet0_check()
   689  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r300_packet0_check()
   717  ib[idx] = (idx_value & 31) | /* keep the 1st 5 bits */  in r300_packet0_check()
   729  ib[idx] = tmp;  in r300_packet0_check()
   798  ib[idx] = tmp;  in r300_packet0_check()
   883  ib[idx] = tmp;  in r300_packet0_check()
  1088  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r300_packet0_check()
  1101  ib[idx] = idx_value & ~1;  in r300_packet0_check()
    [all …]
|
D | uvd_v1_0.c |
   481  void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)  in uvd_v1_0_ib_execute() argument
   483  struct radeon_ring *ring = &rdev->ring[ib->ring];  in uvd_v1_0_ib_execute()
   486  radeon_ring_write(ring, ib->gpu_addr);  in uvd_v1_0_ib_execute()
   488  radeon_ring_write(ring, ib->length_dw);  in uvd_v1_0_ib_execute()
|
D | radeon.h |
  1020  struct radeon_ib *ib, struct radeon_vm *vm,
  1022  void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib);
  1023  int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib,
  1092  struct radeon_ib ib;  member
  1109  return p->ib.ptr[idx];  in radeon_get_ib_value()
  1745  void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  1823  int (*ib_parse)(struct radeon_device *rdev, struct radeon_ib *ib);
  1827  void (*ib_execute)(struct radeon_device *rdev, struct radeon_ib *ib);
  1877  struct radeon_ib *ib,
  1881  struct radeon_ib *ib,
    [all …]
|
D | si.c |
  3401  void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)  in si_ring_ib_execute() argument
  3403  struct radeon_ring *ring = &rdev->ring[ib->ring];  in si_ring_ib_execute()
  3404  unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;  in si_ring_ib_execute()
  3407  if (ib->is_const_ib) {  in si_ring_ib_execute()
  3438  (ib->gpu_addr & 0xFFFFFFFC));  in si_ring_ib_execute()
  3439  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);  in si_ring_ib_execute()
  3440  radeon_ring_write(ring, ib->length_dw | (vm_id << 24));  in si_ring_ib_execute()
  3442  if (!ib->is_const_ib) {  in si_ring_ib_execute()
  4445  u32 *ib, struct radeon_cs_packet *pkt)  in si_vm_packet3_ce_check() argument
  4466  static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx)  in si_vm_packet3_cp_dma_check() argument
    [all …]
|
D | ni.c |
  1431  void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)  in cayman_ring_ib_execute() argument
  1433  struct radeon_ring *ring = &rdev->ring[ib->ring];  in cayman_ring_ib_execute()
  1434  unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;  in cayman_ring_ib_execute()
  1455  (ib->gpu_addr & 0xFFFFFFFC));  in cayman_ring_ib_execute()
  1456  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);  in cayman_ring_ib_execute()
  1457  radeon_ring_write(ring, ib->length_dw | (vm_id << 24));  in cayman_ring_ib_execute()
|
D | r600.c |
  3326  void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)  in r600_ring_ib_execute() argument
  3328  struct radeon_ring *ring = &rdev->ring[ib->ring];  in r600_ring_ib_execute()
  3351  (ib->gpu_addr & 0xFFFFFFFC));  in r600_ring_ib_execute()
  3352  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);  in r600_ring_ib_execute()
  3353  radeon_ring_write(ring, ib->length_dw);  in r600_ring_ib_execute()
  3358  struct radeon_ib ib;  in r600_ib_test() local
  3370  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  in r600_ib_test()
  3375  ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);  in r600_ib_test()
  3376  ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);  in r600_ib_test()
  3377  ib.ptr[2] = 0xDEADBEEF;  in r600_ib_test()
    [all …]
|
D | cik.c |
  4141  void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)  in cik_ring_ib_execute() argument
  4143  struct radeon_ring *ring = &rdev->ring[ib->ring];  in cik_ring_ib_execute()
  4144  unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;  in cik_ring_ib_execute()
  4147  if (ib->is_const_ib) {  in cik_ring_ib_execute()
  4173  control |= ib->length_dw | (vm_id << 24);  in cik_ring_ib_execute()
  4176  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFFC));  in cik_ring_ib_execute()
  4177  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);  in cik_ring_ib_execute()
  4193  struct radeon_ib ib;  in cik_ib_test() local
  4205  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  in cik_ib_test()
  4211  ib.ptr[0] = PACKET3(PACKET3_SET_UCONFIG_REG, 1);  in cik_ib_test()
    [all …]
|
D | r600_cp.c |
    66  unsigned family, u32 *ib, int *l);
  2619  u32 *ib, cs_id = 0;  in r600_cs_legacy_ioctl() local
  2637  ib = dev->agp_buffer_map->handle + buf->offset;  in r600_cs_legacy_ioctl()
  2639  r = r600_cs_legacy(dev, data, fpriv, family, ib, &l);  in r600_cs_legacy_ioctl()
|
D | evergreen.c |
  3019  void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)  in evergreen_ring_ib_execute() argument
  3021  struct radeon_ring *ring = &rdev->ring[ib->ring];  in evergreen_ring_ib_execute()
  3048  (ib->gpu_addr & 0xFFFFFFFC));  in evergreen_ring_ib_execute()
  3049  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);  in evergreen_ring_ib_execute()
  3050  radeon_ring_write(ring, ib->length_dw);  in evergreen_ring_ib_execute()
|
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_vce.c |
   372  struct amdgpu_ib *ib = NULL;  in amdgpu_vce_get_create_msg() local
   378  ib = kzalloc(sizeof(struct amdgpu_ib), GFP_KERNEL);  in amdgpu_vce_get_create_msg()
   379  if (!ib)  in amdgpu_vce_get_create_msg()
   381  r = amdgpu_ib_get(ring, NULL, ib_size_dw * 4, ib);  in amdgpu_vce_get_create_msg()
   384  kfree(ib);  in amdgpu_vce_get_create_msg()
   388  dummy = ib->gpu_addr + 1024;  in amdgpu_vce_get_create_msg()
   391  ib->length_dw = 0;  in amdgpu_vce_get_create_msg()
   392  ib->ptr[ib->length_dw++] = 0x0000000c; /* len */  in amdgpu_vce_get_create_msg()
   393  ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */  in amdgpu_vce_get_create_msg()
   394  ib->ptr[ib->length_dw++] = handle;  in amdgpu_vce_get_create_msg()
    [all …]
|
D | amdgpu_ib.c |
    59  unsigned size, struct amdgpu_ib *ib)  in amdgpu_ib_get() argument
    66  &ib->sa_bo, size, 256);  in amdgpu_ib_get()
    72  ib->ptr = amdgpu_sa_bo_cpu_addr(ib->sa_bo);  in amdgpu_ib_get()
    75  ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo);  in amdgpu_ib_get()
    78  amdgpu_sync_create(&ib->sync);  in amdgpu_ib_get()
    80  ib->ring = ring;  in amdgpu_ib_get()
    81  ib->vm = vm;  in amdgpu_ib_get()
    94  void amdgpu_ib_free(struct amdgpu_device *adev, struct amdgpu_ib *ib)  in amdgpu_ib_free() argument
    96  amdgpu_sync_free(adev, &ib->sync, &ib->fence->base);  in amdgpu_ib_free()
    97  amdgpu_sa_bo_free(adev, &ib->sa_bo, &ib->fence->base);  in amdgpu_ib_free()
    [all …]
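amdgpu keeps the same get/fill/free life cycle as radeon but, as amdgpu_vce.c above shows, often heap-allocates the IB struct itself. A minimal sketch built only from the calls visible in these hits, with error handling trimmed:

    struct amdgpu_ib *ib;
    int r;

    ib = kzalloc(sizeof(struct amdgpu_ib), GFP_KERNEL);
    if (!ib)
        return -ENOMEM;

    r = amdgpu_ib_get(ring, NULL, ib_size_dw * 4, ib); /* NULL vm: kernel IB */
    if (r) {
        kfree(ib);
        return r;
    }

    ib->length_dw = 0;  /* packets are appended at ib->ptr[ib->length_dw++] */
    /* ... fill and submit the IB ... */

    amdgpu_ib_free(adev, ib);  /* drops the sa_bo and sync objects, per the hits */
    kfree(ib);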
|
D | sdma_v2_4.c |
   245  struct amdgpu_ib *ib)  in sdma_v2_4_ring_emit_ib() argument
   247  u32 vmid = (ib->vm ? ib->vm->ids[ring->idx].id : 0) & 0xf;  in sdma_v2_4_ring_emit_ib()
   268  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);  in sdma_v2_4_ring_emit_ib()
   269  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v2_4_ring_emit_ib()
   270  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v2_4_ring_emit_ib()
   684  struct amdgpu_ib ib;  in sdma_v2_4_ring_test_ib() local
   701  memset(&ib, 0, sizeof(ib));  in sdma_v2_4_ring_test_ib()
   702  r = amdgpu_ib_get(ring, NULL, 256, &ib);  in sdma_v2_4_ring_test_ib()
   708  ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v2_4_ring_test_ib()
   710  ib.ptr[1] = lower_32_bits(gpu_addr);  in sdma_v2_4_ring_test_ib()
    [all …]
|
D | cik_sdma.c |
   213  struct amdgpu_ib *ib)  in cik_sdma_ring_emit_ib() argument
   215  u32 extra_bits = (ib->vm ? ib->vm->ids[ring->idx].id : 0) & 0xf;  in cik_sdma_ring_emit_ib()
   232  amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */  in cik_sdma_ring_emit_ib()
   233  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff);  in cik_sdma_ring_emit_ib()
   234  amdgpu_ring_write(ring, ib->length_dw);  in cik_sdma_ring_emit_ib()
   630  struct amdgpu_ib ib;  in cik_sdma_ring_test_ib() local
   647  memset(&ib, 0, sizeof(ib));  in cik_sdma_ring_test_ib()
   648  r = amdgpu_ib_get(ring, NULL, 256, &ib);  in cik_sdma_ring_test_ib()
   654  ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);  in cik_sdma_ring_test_ib()
   655  ib.ptr[1] = lower_32_bits(gpu_addr);  in cik_sdma_ring_test_ib()
    [all …]
|
D | sdma_v3_0.c |
   356  struct amdgpu_ib *ib)  in sdma_v3_0_ring_emit_ib() argument
   358  u32 vmid = (ib->vm ? ib->vm->ids[ring->idx].id : 0) & 0xf;  in sdma_v3_0_ring_emit_ib()
   378  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);  in sdma_v3_0_ring_emit_ib()
   379  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v3_0_ring_emit_ib()
   380  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v3_0_ring_emit_ib()
   834  struct amdgpu_ib ib;  in sdma_v3_0_ring_test_ib() local
   851  memset(&ib, 0, sizeof(ib));  in sdma_v3_0_ring_test_ib()
   852  r = amdgpu_ib_get(ring, NULL, 256, &ib);  in sdma_v3_0_ring_test_ib()
   858  ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v3_0_ring_test_ib()
   860  ib.ptr[1] = lower_32_bits(gpu_addr);  in sdma_v3_0_ring_test_ib()
    [all …]
|
D | amdgpu_vm.c |
   300  struct amdgpu_ib *ib,  in amdgpu_vm_update_pages() argument
   309  amdgpu_vm_copy_pte(adev, ib, pe, src, count);  in amdgpu_vm_update_pages()
   312  amdgpu_vm_write_pte(adev, ib, pe, addr,  in amdgpu_vm_update_pages()
   316  amdgpu_vm_set_pte_pde(adev, ib, pe, addr,  in amdgpu_vm_update_pages()
   343  struct amdgpu_ib *ib;  in amdgpu_vm_clear_bo() local
   359  ib = kzalloc(sizeof(struct amdgpu_ib), GFP_KERNEL);  in amdgpu_vm_clear_bo()
   360  if (!ib)  in amdgpu_vm_clear_bo()
   363  r = amdgpu_ib_get(ring, NULL, entries * 2 + 64, ib);  in amdgpu_vm_clear_bo()
   367  ib->length_dw = 0;  in amdgpu_vm_clear_bo()
   369  amdgpu_vm_update_pages(adev, ib, addr, 0, entries, 0, 0, 0);  in amdgpu_vm_clear_bo()
    [all …]
|
D | amdgpu_uvd.c |
   624  struct amdgpu_ib *ib;  in amdgpu_uvd_cs_pass2() local
   646  ib = &ctx->parser->ibs[ctx->ib_idx];  in amdgpu_uvd_cs_pass2()
   647  ib->ptr[ctx->data0] = start & 0xFFFFFFFF;  in amdgpu_uvd_cs_pass2()
   648  ib->ptr[ctx->data1] = start >> 32;  in amdgpu_uvd_cs_pass2()
   710  struct amdgpu_ib *ib = &ctx->parser->ibs[ctx->ib_idx];  in amdgpu_uvd_cs_reg() local
   717  if (ctx->idx >= ib->length_dw) {  in amdgpu_uvd_cs_reg()
   756  struct amdgpu_ib *ib = &ctx->parser->ibs[ctx->ib_idx];  in amdgpu_uvd_cs_packets() local
   759  for (ctx->idx = 0 ; ctx->idx < ib->length_dw; ) {  in amdgpu_uvd_cs_packets()
   798  struct amdgpu_ib *ib = &parser->ibs[ib_idx];  in amdgpu_uvd_ring_parse_cs() local
   801  if (ib->length_dw % 16) {  in amdgpu_uvd_ring_parse_cs()
    [all …]
|
D | amdgpu_cs.c |
   626  struct amdgpu_ib *ib;  in amdgpu_cs_ib_fill() local
   631  ib = &parser->ibs[j];  in amdgpu_cs_ib_fill()
   671  r = amdgpu_ib_get(ring, NULL, chunk_ib->ib_bytes, ib);  in amdgpu_cs_ib_fill()
   677  memcpy(ib->ptr, kptr, chunk_ib->ib_bytes);  in amdgpu_cs_ib_fill()
   680  r = amdgpu_ib_get(ring, vm, 0, ib);  in amdgpu_cs_ib_fill()
   686  ib->gpu_addr = chunk_ib->va_start;  in amdgpu_cs_ib_fill()
   689  ib->length_dw = chunk_ib->ib_bytes / 4;  in amdgpu_cs_ib_fill()
   690  ib->flags = chunk_ib->flags;  in amdgpu_cs_ib_fill()
   691  ib->ctx = parser->ctx;  in amdgpu_cs_ib_fill()
   703  struct amdgpu_ib *ib = &parser->ibs[0];  in amdgpu_cs_ib_fill() local
    [all …]
|
D | amdgpu_trace.h |
    57  __field(struct amdgpu_ib *, ib)
    66  __entry->ib = job->ibs;
    72  __entry->adev, __entry->sched_job, __entry->ib,
    82  __field(struct amdgpu_ib *, ib)
    91  __entry->ib = job->ibs;
    97  __entry->adev, __entry->sched_job, __entry->ib,
|
D | amdgpu.h |
   256  void (*emit_copy_buffer)(struct amdgpu_ib *ib,
   271  void (*emit_fill_buffer)(struct amdgpu_ib *ib,
   283  void (*copy_pte)(struct amdgpu_ib *ib,
   287  void (*write_pte)(struct amdgpu_ib *ib,
   292  void (*set_pte_pde)(struct amdgpu_ib *ib,
   297  void (*pad_ib)(struct amdgpu_ib *ib);
   332  struct amdgpu_ib *ib);
  1209  unsigned size, struct amdgpu_ib *ib);
  1210  void amdgpu_ib_free(struct amdgpu_device *adev, struct amdgpu_ib *ib);
  1212  struct amdgpu_ib *ib, void *owner);
    [all …]
|
D | amdgpu_ttm.c |
  1020  struct amdgpu_ib *ib;  in amdgpu_copy_buffer() local
  1032  ib = kzalloc(sizeof(struct amdgpu_ib), GFP_KERNEL);  in amdgpu_copy_buffer()
  1033  if (!ib)  in amdgpu_copy_buffer()
  1036  r = amdgpu_ib_get(ring, NULL, num_dw * 4, ib);  in amdgpu_copy_buffer()
  1038  kfree(ib);  in amdgpu_copy_buffer()
  1042  ib->length_dw = 0;  in amdgpu_copy_buffer()
  1045  r = amdgpu_sync_resv(adev, &ib->sync, resv,  in amdgpu_copy_buffer()
  1056  amdgpu_emit_copy_buffer(adev, ib, src_offset, dst_offset,  in amdgpu_copy_buffer()
  1064  amdgpu_vm_pad_ib(adev, ib);  in amdgpu_copy_buffer()
  1065  WARN_ON(ib->length_dw > num_dw);  in amdgpu_copy_buffer()
    [all …]
|
D | amdgpu_vce.h | 40 void amdgpu_vce_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib);
|
D | uvd_v6_0.c |
   563  struct amdgpu_ib *ib)  in uvd_v6_0_ring_emit_ib() argument
   566  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));  in uvd_v6_0_ring_emit_ib()
   568  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in uvd_v6_0_ring_emit_ib()
   570  amdgpu_ring_write(ring, ib->length_dw);  in uvd_v6_0_ring_emit_ib()
|
D | uvd_v5_0.c |
   563  struct amdgpu_ib *ib)  in uvd_v5_0_ring_emit_ib() argument
   566  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));  in uvd_v5_0_ring_emit_ib()
   568  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in uvd_v5_0_ring_emit_ib()
   570  amdgpu_ring_write(ring, ib->length_dw);  in uvd_v5_0_ring_emit_ib()
|
D | gfx_v8_0.c |
   667  struct amdgpu_ib ib;  in gfx_v8_0_ring_test_ib() local
   680  memset(&ib, 0, sizeof(ib));  in gfx_v8_0_ring_test_ib()
   681  r = amdgpu_ib_get(ring, NULL, 256, &ib);  in gfx_v8_0_ring_test_ib()
   686  ib.ptr[0] = PACKET3(PACKET3_SET_UCONFIG_REG, 1);  in gfx_v8_0_ring_test_ib()
   687  ib.ptr[1] = ((scratch - PACKET3_SET_UCONFIG_REG_START));  in gfx_v8_0_ring_test_ib()
   688  ib.ptr[2] = 0xDEADBEEF;  in gfx_v8_0_ring_test_ib()
   689  ib.length_dw = 3;  in gfx_v8_0_ring_test_ib()
   691  r = amdgpu_sched_ib_submit_kernel_helper(adev, ring, &ib, 1, NULL,  in gfx_v8_0_ring_test_ib()
   719  amdgpu_ib_free(adev, &ib);  in gfx_v8_0_ring_test_ib()
  4543  struct amdgpu_ib *ib)  in gfx_v8_0_ring_emit_ib_gfx() argument
    [all …]
|
D | gfx_v7_0.c |
  2565  struct amdgpu_ib *ib)  in gfx_v7_0_ring_emit_ib_gfx() argument
  2567  bool need_ctx_switch = ring->current_ctx != ib->ctx;  in gfx_v7_0_ring_emit_ib_gfx()
  2572  if ((ib->flags & AMDGPU_IB_FLAG_PREAMBLE) && !need_ctx_switch)  in gfx_v7_0_ring_emit_ib_gfx()
  2591  if (ib->flags & AMDGPU_IB_FLAG_CE)  in gfx_v7_0_ring_emit_ib_gfx()
  2596  control |= ib->length_dw |  in gfx_v7_0_ring_emit_ib_gfx()
  2597  (ib->vm ? (ib->vm->ids[ring->idx].id << 24) : 0);  in gfx_v7_0_ring_emit_ib_gfx()
  2604  (ib->gpu_addr & 0xFFFFFFFC));  in gfx_v7_0_ring_emit_ib_gfx()
  2605  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);  in gfx_v7_0_ring_emit_ib_gfx()
  2610  struct amdgpu_ib *ib)  in gfx_v7_0_ring_emit_ib_compute() argument
  2625  control |= ib->length_dw |  in gfx_v7_0_ring_emit_ib_compute()
    [all …]
|
D | uvd_v4_2.c |
   519  struct amdgpu_ib *ib)  in uvd_v4_2_ring_emit_ib() argument
   522  amdgpu_ring_write(ring, ib->gpu_addr);  in uvd_v4_2_ring_emit_ib()
   524  amdgpu_ring_write(ring, ib->length_dw);  in uvd_v4_2_ring_emit_ib()
|
/linux-4.4.14/arch/s390/include/asm/ |
D | idals.h |
   119  struct idal_buffer *ib;  in idal_buffer_alloc() local
   124  ib = kmalloc(sizeof(struct idal_buffer) + nr_ptrs*sizeof(void *),  in idal_buffer_alloc()
   126  if (ib == NULL)  in idal_buffer_alloc()
   128  ib->size = size;  in idal_buffer_alloc()
   129  ib->page_order = page_order;  in idal_buffer_alloc()
   132  ib->data[i] = ib->data[i-1] + IDA_BLOCK_SIZE;  in idal_buffer_alloc()
   135  ib->data[i] = (void *)  in idal_buffer_alloc()
   137  if (ib->data[i] != NULL)  in idal_buffer_alloc()
   142  free_pages((unsigned long) ib->data[i],  in idal_buffer_alloc()
   143  ib->page_order);  in idal_buffer_alloc()
    [all …]
|
/linux-4.4.14/drivers/net/ethernet/amd/ |
D | 7990.c |
    99  t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
   100  ib->brx_ring[t].length, \
   101  ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
   105  t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
   106  ib->btx_ring[t].length, \
   107  ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
   139  volatile struct lance_init_block *ib = lp->init_block;  in lance_init_ring() local
   149  ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */  in lance_init_ring()
   162  ib->phys_addr[0] = dev->dev_addr[1];  in lance_init_ring()
   163  ib->phys_addr[1] = dev->dev_addr[0];  in lance_init_ring()
    [all …]
|
D | sunlance.c |
   323  struct lance_init_block *ib = lp->init_block_mem;  in lance_init_ring_dvma() local
   336  ib->phys_addr [0] = dev->dev_addr [1];  in lance_init_ring_dvma()
   337  ib->phys_addr [1] = dev->dev_addr [0];  in lance_init_ring_dvma()
   338  ib->phys_addr [2] = dev->dev_addr [3];  in lance_init_ring_dvma()
   339  ib->phys_addr [3] = dev->dev_addr [2];  in lance_init_ring_dvma()
   340  ib->phys_addr [4] = dev->dev_addr [5];  in lance_init_ring_dvma()
   341  ib->phys_addr [5] = dev->dev_addr [4];  in lance_init_ring_dvma()
   346  ib->btx_ring [i].tmd0 = leptr;  in lance_init_ring_dvma()
   347  ib->btx_ring [i].tmd1_hadr = leptr >> 16;  in lance_init_ring_dvma()
   348  ib->btx_ring [i].tmd1_bits = 0;  in lance_init_ring_dvma()
    [all …]
|
D | a2065.c |
   152  volatile struct lance_init_block *ib = lp->init_block;  in lance_init_ring() local
   163  ib->mode = 0;  in lance_init_ring()
   168  ib->phys_addr[0] = dev->dev_addr[1];  in lance_init_ring()
   169  ib->phys_addr[1] = dev->dev_addr[0];  in lance_init_ring()
   170  ib->phys_addr[2] = dev->dev_addr[3];  in lance_init_ring()
   171  ib->phys_addr[3] = dev->dev_addr[2];  in lance_init_ring()
   172  ib->phys_addr[4] = dev->dev_addr[5];  in lance_init_ring()
   173  ib->phys_addr[5] = dev->dev_addr[4];  in lance_init_ring()
   179  ib->btx_ring[i].tmd0 = leptr;  in lance_init_ring()
   180  ib->btx_ring[i].tmd1_hadr = leptr >> 16;  in lance_init_ring()
    [all …]
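Three of the LANCE drivers above (7990.c, sunlance.c, a2065.c) fill the init block's station address in byte-swapped pairs, presumably because the chip fetches the init block as little-endian 16-bit words while these hosts are big-endian. The pattern as a hypothetical helper — the function name is illustrative, not from the source:

    /* Sketch of the swap visible in lance_init_ring() above. */
    static void lance_set_hwaddr(volatile struct lance_init_block *ib,
                                 const u8 *dev_addr)
    {
        int i;

        for (i = 0; i < 6; i += 2) {
            ib->phys_addr[i]     = dev_addr[i + 1]; /* swapped byte pair */
            ib->phys_addr[i + 1] = dev_addr[i];
        }
    }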
|
D | declance.c |
   234  #define lib_ptr(ib, rt, type) \  argument
   235  ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
   451  volatile u16 *ib = (volatile u16 *)dev->mem_start;  in lance_init_ring() local
   463  *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) |  in lance_init_ring()
   465  *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) |  in lance_init_ring()
   467  *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) |  in lance_init_ring()
   473  *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) |  in lance_init_ring()
   475  *lib_ptr(ib, rx_ptr, lp->type) = leptr;  in lance_init_ring()
   482  *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) |  in lance_init_ring()
   484  *lib_ptr(ib, tx_ptr, lp->type) = leptr;  in lance_init_ring()
    [all …]
|
D | ni65.c |
   226  struct init_block ib;  member
   580  p->ib.eaddr[i] = daddr[i];  in ni65_init_lance()
   583  p->ib.filter[i] = filter;  in ni65_init_lance()
   584  p->ib.mode = mode;  in ni65_init_lance()
   586  p->ib.trp = (u32) isa_virt_to_bus(p->tmdhead) | TMDNUMMASK;  in ni65_init_lance()
   587  p->ib.rrp = (u32) isa_virt_to_bus(p->rmdhead) | RMDNUMMASK;  in ni65_init_lance()
   589  pib = (u32) isa_virt_to_bus(&p->ib);  in ni65_init_lance()
|
D | pcnet32.c |
  2623  volatile struct pcnet32_init_block *ib = lp->init_block;  in pcnet32_load_multicast() local
  2624  volatile __le16 *mcast_table = (__le16 *)ib->filter;  in pcnet32_load_multicast()
  2632  ib->filter[0] = cpu_to_le32(~0U);  in pcnet32_load_multicast()
  2633  ib->filter[1] = cpu_to_le32(~0U);  in pcnet32_load_multicast()
  2641  ib->filter[0] = 0;  in pcnet32_load_multicast()
  2642  ib->filter[1] = 0;  in pcnet32_load_multicast()
|
/linux-4.4.14/drivers/infiniband/hw/mlx4/ |
D | ah.c |
    48  ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn | (ah_attr->port_num << 24));  in create_ib_ah()
    49  ah->av.ib.g_slid = ah_attr->src_path_bits;  in create_ib_ah()
    51  ah->av.ib.g_slid |= 0x80;  in create_ib_ah()
    52  ah->av.ib.gid_index = ah_attr->grh.sgid_index;  in create_ib_ah()
    53  ah->av.ib.hop_limit = ah_attr->grh.hop_limit;  in create_ib_ah()
    54  ah->av.ib.sl_tclass_flowlabel |=  in create_ib_ah()
    57  memcpy(ah->av.ib.dgid, ah_attr->grh.dgid.raw, 16);  in create_ib_ah()
    60  ah->av.ib.dlid = cpu_to_be16(ah_attr->dlid);  in create_ib_ah()
    62  ah->av.ib.stat_rate = ah_attr->static_rate + MLX4_STAT_RATE_OFFSET;  in create_ib_ah()
    63  while (ah->av.ib.stat_rate > IB_RATE_2_5_GBPS + MLX4_STAT_RATE_OFFSET &&  in create_ib_ah()
    [all …]
|
D | qp.c |
  2175  be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 28;  in build_sriov_qp0_header()
  2177  cpu_to_be16(ah->av.ib.g_slid & 0x7f);  in build_sriov_qp0_header()
  2179  cpu_to_be16(ah->av.ib.g_slid & 0x7f);  in build_sriov_qp0_header()
  2297  be32_to_cpu(ah->av.ib.port_pd) >> 24,  in build_mlx_header()
  2298  ah->av.ib.gid_index, &sgid.raw[0]);  in build_mlx_header()
  2303  be32_to_cpu(ah->av.ib.port_pd) >> 24,  in build_mlx_header()
  2304  ah->av.ib.gid_index, &sgid,  in build_mlx_header()
  2321  be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 28;  in build_mlx_header()
  2322  sqp->ud_header.lrh.destination_lid = ah->av.ib.dlid;  in build_mlx_header()
  2323  sqp->ud_header.lrh.source_lid = cpu_to_be16(ah->av.ib.g_slid & 0x7f);  in build_mlx_header()
    [all …]
|
D | mlx4_ib.h |
   783  u8 port = be32_to_cpu(ah->av.ib.port_pd) >> 24 & 3;  in mlx4_ib_ah_grh_present()
   788  return !!(ah->av.ib.g_slid & 0x80);  in mlx4_ib_ah_grh_present()
|
D | mad.c |
  1229  to_mah(ah)->av.ib.gid_index = sgid_index;  in mlx4_ib_send_to_wire()
  1231  to_mah(ah)->av.ib.port_pd &= cpu_to_be32(0x7FFFFFFF);  in mlx4_ib_send_to_wire()
  1392  port = be32_to_cpu(ah.av.ib.port_pd) >> 24;  in mlx4_ib_multiplex_mad()
  1396  ah.av.ib.port_pd = cpu_to_be32(port << 24 | (be32_to_cpu(ah.av.ib.port_pd) & 0xffffff));  in mlx4_ib_multiplex_mad()
|
D | main.c |
  1316  mlx4_spec->ib.l3_qpn =  in parse_flow_attr()
  1318  mlx4_spec->ib.qpn_mask =  in parse_flow_attr()
|
/linux-4.4.14/drivers/isdn/divert/ |
D | divert_procfs.c |
    43  struct divert_info *ib;  in put_info_buffer() local
    52  if (!(ib = kmalloc(sizeof(struct divert_info) + strlen(cp), GFP_ATOMIC)))  in put_info_buffer()
    54  strcpy(ib->info_start, cp); /* set output string */  in put_info_buffer()
    55  ib->next = NULL;  in put_info_buffer()
    57  ib->usage_cnt = if_used;  in put_info_buffer()
    59  divert_info_head = ib; /* new head */  in put_info_buffer()
    61  divert_info_tail->next = ib; /* follows existing messages */  in put_info_buffer()
    62  divert_info_tail = ib; /* new tail */  in put_info_buffer()
    68  ib = divert_info_head;  in put_info_buffer()
    70  kfree(ib);  in put_info_buffer()
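put_info_buffer() above (and its near-twin put_log_buffer() in hysdn_proclog.c below) uses the classic head/tail singly-linked queue so /proc readers drain messages in arrival order. The append step, reassembled from the hits — locking and the no-reader early-exit path are omitted:

    /* Sketch: tail append as in put_info_buffer(). */
    struct divert_info *ib;

    ib = kmalloc(sizeof(struct divert_info) + strlen(cp), GFP_ATOMIC);
    if (!ib)
        return;
    strcpy(ib->info_start, cp);  /* message text trails the struct */
    ib->next = NULL;
    if (!divert_info_head)
        divert_info_head = ib;        /* queue was empty: new head */
    else
        divert_info_tail->next = ib;  /* follows existing messages */
    divert_info_tail = ib;            /* always becomes the new tail */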
|
/linux-4.4.14/drivers/isdn/hysdn/ |
D | hysdn_proclog.c |
   103  struct log_data *ib;  in put_log_buffer() local
   117  if (!(ib = kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC)))  in put_log_buffer()
   119  strcpy(ib->log_start, cp); /* set output string */  in put_log_buffer()
   120  ib->next = NULL;  in put_log_buffer()
   121  ib->proc_ctrl = pd; /* point to own control structure */  in put_log_buffer()
   123  ib->usage_cnt = pd->if_used;  in put_log_buffer()
   125  pd->log_head = ib; /* new head */  in put_log_buffer()
   127  pd->log_tail->next = ib; /* follows existing messages */  in put_log_buffer()
   128  pd->log_tail = ib; /* new tail */  in put_log_buffer()
   137  ib = pd->log_head;  in put_log_buffer()
    [all …]
|
/linux-4.4.14/drivers/s390/char/ |
D | fs3270.c |
   247  struct idal_buffer *ib;  in fs3270_read() local
   255  ib = idal_buffer_alloc(count, 0);  in fs3270_read()
   256  if (IS_ERR(ib))  in fs3270_read()
   263  raw3270_request_set_idal(rq, ib);  in fs3270_read()
   270  if (idal_buffer_to_user(ib, data, count) != 0)  in fs3270_read()
   280  idal_buffer_free(ib);  in fs3270_read()
   292  struct idal_buffer *ib;  in fs3270_write() local
   299  ib = idal_buffer_alloc(count, 0);  in fs3270_write()
   300  if (IS_ERR(ib))  in fs3270_write()
   304  if (idal_buffer_from_user(ib, data, count) == 0) {  in fs3270_write()
    [all …]
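The fs3270.c hits show the full idal_buffer life cycle for a channel read: allocate an IDAL sized for the user buffer, attach it to the CCW request, copy out to user space, free. A sketch under those assumptions, with the request start/wait step omitted:

    struct idal_buffer *ib;
    ssize_t rc;

    ib = idal_buffer_alloc(count, 0);  /* page order 0 */
    if (IS_ERR(ib))
        return -ENOMEM;

    raw3270_request_set_idal(rq, ib);  /* ccw.cda = __pa(ib->data), per raw3270.c below */
    /* ... start the raw3270 request and wait for completion ... */

    rc = count;
    if (idal_buffer_to_user(ib, data, count) != 0)
        rc = -EFAULT;                  /* copy to user space failed */
    idal_buffer_free(ib);
    return rc;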
|
D | raw3270.c |
   224  raw3270_request_set_idal(struct raw3270_request *rq, struct idal_buffer *ib)  in raw3270_request_set_idal() argument
   226  rq->ccw.cda = __pa(ib->data);  in raw3270_request_set_idal()
   227  rq->ccw.count = ib->size;  in raw3270_request_set_idal()
|
/linux-4.4.14/drivers/net/ethernet/brocade/bna/ |
D | bna_hw_defs.h |
   244  struct bna_ib *ib = _ib; \
   245  if ((ib->intr_type == BNA_INTR_T_INTX)) { \
   247  intx_mask &= ~(ib->intr_vector); \
   250  bna_ib_coalescing_timer_set(&ib->door_bell, \
   251  ib->coalescing_timeo); \
   253  bna_ib_ack(&ib->door_bell, 0); \
   259  struct bna_ib *ib = _ib; \
   261  ib->door_bell.doorbell_addr); \
   262  if (ib->intr_type == BNA_INTR_T_INTX) { \
   264  intx_mask |= ib->intr_vector; \
|
D | bna_tx_rx.c |
    24  bna_ib_coalescing_timeo_set(struct bna_ib *ib, u8 coalescing_timeo)  in bna_ib_coalescing_timeo_set() argument
    26  ib->coalescing_timeo = coalescing_timeo;  in bna_ib_coalescing_timeo_set()
    27  ib->door_bell.doorbell_ack = BNA_DOORBELL_IB_INT_ACK(  in bna_ib_coalescing_timeo_set()
    28  (u32)ib->coalescing_timeo, 0);  in bna_ib_coalescing_timeo_set()
  1483  bna_ib_start(rx->bna, &rxp->cq.ib, is_regular);  in bna_rx_sm_started_entry()
  1675  cfg_req->q_cfg[i].ib.index_addr.a32.addr_lo =  in bna_bfi_rx_enet_start()
  1676  rxp->cq.ib.ib_seg_host_addr.lsb;  in bna_bfi_rx_enet_start()
  1677  cfg_req->q_cfg[i].ib.index_addr.a32.addr_hi =  in bna_bfi_rx_enet_start()
  1678  rxp->cq.ib.ib_seg_host_addr.msb;  in bna_bfi_rx_enet_start()
  1679  cfg_req->q_cfg[i].ib.intr.msix_index =  in bna_bfi_rx_enet_start()
    [all …]
|
D | bna_types.h |
   454  struct bna_ib ib;  member
   646  struct bna_ib ib;  member
|
D | bfi_enet.h |
   431  struct bfi_enet_ib ib;  member
   504  struct bfi_enet_ib ib;  member
|
/linux-4.4.14/arch/arm/boot/dts/ |
D | kirkwood-ib62x0.dts | 8 …compatible = "raidsonic,ib-nas6210-b", "raidsonic,ib-nas6220-b", "raidsonic,ib-nas6210", "raidsoni…
|
/linux-4.4.14/Documentation/devicetree/bindings/arm/ |
D | marvell,kirkwood.txt |
    67  "raidsonic,ib-nas6210"
    68  "raidsonic,ib-nas6210-b"
    69  "raidsonic,ib-nas6220"
    70  "raidsonic,ib-nas6220-b"
    71  "raidsonic,ib-nas62x0"
|
/linux-4.4.14/drivers/staging/rdma/ehca/ |
D | ehca_mrmw.c |
   176  &e_maxmr->ib.ib_mr.lkey,  in ehca_get_dma_mr()
   177  &e_maxmr->ib.ib_mr.rkey);  in ehca_get_dma_mr()
   183  ib_mr = &e_maxmr->ib.ib_mr;  in ehca_get_dma_mr()
   260  e_pd, &e_mr->ib.ib_mr.lkey,  in ehca_reg_phys_mr()
   261  &e_mr->ib.ib_mr.rkey);  in ehca_reg_phys_mr()
   289  e_pd, &pginfo, &e_mr->ib.ib_mr.lkey,  in ehca_reg_phys_mr()
   290  &e_mr->ib.ib_mr.rkey, EHCA_REG_MR);  in ehca_reg_phys_mr()
   298  return &e_mr->ib.ib_mr;  in ehca_reg_phys_mr()
   405  e_pd, &pginfo, &e_mr->ib.ib_mr.lkey,  in ehca_reg_user_mr()
   406  &e_mr->ib.ib_mr.rkey, EHCA_REG_MR);  in ehca_reg_user_mr()
    [all …]
|
D | ehca_classes.h | 270 } ib; member
|
/linux-4.4.14/drivers/infiniband/core/ |
D | cma.c |
   218  struct ib_cm_id *ib;  member
   236  struct ib_sa_multicast *ib;  member
   359  kfree(mc->multicast.ib);  in release_mc()
   845  if (!id_priv->cm_id.ib || (id_priv->id.qp_type == IB_QPT_UD))  in rdma_init_qp_attr()
   848  ret = ib_cm_init_qp_attr(id_priv->cm_id.ib, qp_attr,  in rdma_init_qp_attr()
   946  struct sockaddr_ib *listen_ib, *ib;  in cma_save_ib_info() local
   950  ib = (struct sockaddr_ib *)src_addr;  in cma_save_ib_info()
   951  ib->sib_family = AF_IB;  in cma_save_ib_info()
   953  ib->sib_pkey = path->pkey;  in cma_save_ib_info()
   954  ib->sib_flowinfo = path->flow_label;  in cma_save_ib_info()
    [all …]
|
/linux-4.4.14/drivers/pinctrl/meson/ |
D | pinctrl-meson.h |
   195  #define BANK(n, f, l, per, peb, pr, pb, dr, db, or, ob, ir, ib) \  argument
   205  [REG_IN] = { ir, ib }, \
|
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb/ |
D | vsc7326.c |
   221  static void run_table(adapter_t *adapter, struct init_table *ib, int len)  in run_table() argument
   226  if (ib[i].addr == INITBLOCK_SLEEP) {  in run_table()
   227  udelay( ib[i].data );  in run_table()
   228  pr_err("sleep %d us\n",ib[i].data);  in run_table()
   230  vsc_write( adapter, ib[i].addr, ib[i].data );  in run_table()
|
/linux-4.4.14/drivers/net/ethernet/mellanox/mlx4/ |
D | mcg.c |
   861  rule_hw->ib.l3_qpn = spec->ib.l3_qpn;  in parse_trans_rule()
   862  rule_hw->ib.qpn_mask = spec->ib.qpn_msk;  in parse_trans_rule()
   863  memcpy(&rule_hw->ib.dst_gid, &spec->ib.dst_gid, 16);  in parse_trans_rule()
   864  memcpy(&rule_hw->ib.dst_gid_msk, &spec->ib.dst_gid_msk, 16);  in parse_trans_rule()
   952  "dst-gid = %pI6\n", cur->ib.dst_gid);  in mlx4_err_rule()
   955  cur->ib.dst_gid_msk);  in mlx4_err_rule()
  1389  memcpy(spec.ib.dst_gid, gid, 16);  in mlx4_trans_to_dmfs_attach()
  1390  memset(&spec.ib.dst_gid_msk, 0xff, 16);  in mlx4_trans_to_dmfs_attach()
|
/linux-4.4.14/drivers/video/fbdev/matrox/ |
D | matroxfb_maven.c |
   832  unsigned int ib;  in maven_compute_timming() local
   869  ib = ((0x3C0000 * i - 0x8000)/ hdec + 0x05E7) >> 8;  in maven_compute_timming()
   871  } while (ib < ibmin);  in maven_compute_timming()
   872  if (ib >= m->htotal + 2) {  in maven_compute_timming()
   873  ib = ibmin;  in maven_compute_timming()
   879  m->regs[0x9E] = ib;  in maven_compute_timming()
   880  m->regs[0x9F] = ib >> 8;  in maven_compute_timming()
|
/linux-4.4.14/net/rds/ |
D | Makefile | 8 ib.o ib_cm.o ib_recv.o ib_ring.o ib_send.o ib_stats.o \
|
/linux-4.4.14/drivers/media/dvb-frontends/ |
D | bcm3510.c |
   207  u8 ob[MAX_XFER_SIZE], ib[MAX_XFER_SIZE];  in bcm3510_do_hab_cmd() local
   210  if (ilen + 2 > sizeof(ib)) {  in bcm3510_do_hab_cmd()
   232  (ret = bcm3510_hab_get_response(st, ib, ilen+2)) < 0)  in bcm3510_do_hab_cmd()
   236  dbufout(ib,ilen+2,deb_hab);  in bcm3510_do_hab_cmd()
   239  memcpy(ibuf,&ib[2],ilen);  in bcm3510_do_hab_cmd()
|
/linux-4.4.14/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_packet_manager.c |
   123  uint64_t ib, size_t ib_size_in_dwords, bool chain)  in pm_create_runlist() argument
   127  BUG_ON(!pm || !buffer || !ib);  in pm_create_runlist()
   139  packet->ordinal2 = lower_32_bits(ib);  in pm_create_runlist()
   140  packet->bitfields3.ib_base_hi = upper_32_bits(ib);  in pm_create_runlist()
|
/linux-4.4.14/include/net/ |
D | inet_hashtables.h |
    88  static inline struct net *ib_net(struct inet_bind_bucket *ib)  in ib_net() argument
    90  return read_pnet(&ib->ib_net);  in ib_net()
|
/linux-4.4.14/drivers/net/ethernet/dec/tulip/ |
D | de2104x.c |
  1855  struct de_srom_media_block *ib = bufp;  in de21041_get_srom_info() local
  1859  switch(ib->opts & MediaBlockMask) {  in de21041_get_srom_info()
  1891  bufp += sizeof (ib->opts);  in de21041_get_srom_info()
  1893  if (ib->opts & MediaCustomCSRs) {  in de21041_get_srom_info()
  1894  de->media[idx].csr13 = get_unaligned(&ib->csr13);  in de21041_get_srom_info()
  1895  de->media[idx].csr14 = get_unaligned(&ib->csr14);  in de21041_get_srom_info()
  1896  de->media[idx].csr15 = get_unaligned(&ib->csr15);  in de21041_get_srom_info()
  1897  bufp += sizeof(ib->csr13) + sizeof(ib->csr14) +  in de21041_get_srom_info()
  1898  sizeof(ib->csr15);  in de21041_get_srom_info()
|
/linux-4.4.14/arch/ia64/sn/kernel/sn2/ |
D | sn2_smp.c |
   330  sn2_ptc_deadlock_recovery(short *nasids, short ib, short ie, int mynasid,  in sn2_ptc_deadlock_recovery() argument
   343  for (i=ib; i <= ie; i++) {  in sn2_ptc_deadlock_recovery()
|
/linux-4.4.14/arch/ia64/kernel/ |
D | mca_drv.c |
   409  if (!pbci || pbci->ib)  in is_mca_global()
   699  if (pbci->ib)  in recover_from_processor_error()
|
/linux-4.4.14/drivers/media/platform/vivid/ |
D | vivid-tpg-colors.c |
  1168  double ir, ig, ib;  in mult_matrix() local
  1172  ib = m[2][0] * (*r) + m[2][1] * (*g) + m[2][2] * (*b);  in mult_matrix()
  1175  *b = ib;  in mult_matrix()
|
/linux-4.4.14/drivers/gpu/drm/msm/mdp/mdp5/ |
D | mdp5_cmd_encoder.c | 44 .ib = (ib_val), \
|
D | mdp5_encoder.c | 50 .ib = (ib_val), \
|
/linux-4.4.14/include/linux/mlx4/ |
D | device.h |
   787  struct mlx4_av ib;  member
  1208  struct mlx4_spec_ib ib;  member
  1315  struct mlx4_net_trans_rule_hw_ib ib;  member
|
/linux-4.4.14/arch/ia64/include/asm/ |
D | pal.h |
   574  ib : 1, /* Internal bus error */  member
   742  #define pmci_bus_internal_error pme_bus.ib
|
/linux-4.4.14/Documentation/DocBook/media/ |
D | fieldseq_tb.gif.b64 | 394 XH/2aR9jRCn0f9ohLoEeeL/3ff/3gT/4hX/4ib/4jf/4eb8PYN9XCOMZquD5oT/6pX/6qb/6rf/6
|
/linux-4.4.14/include/rdma/ |
D | ib_verbs.h |
   373  struct ib_protocol_stats ib;  member
  1482  struct ib_flow_spec_ib ib;  member
|
/linux-4.4.14/ |
D | MAINTAINERS |
  7058  T: git git://openfabrics.org/~eli/connect-ib.git
  7068  T: git git://openfabrics.org/~eli/connect-ib.git
|