ring_id 796 drivers/crypto/inside-secure/safexcel.h int ring_id,
ring_id 802 drivers/crypto/inside-secure/safexcel.h int ring_id,
ring_id 116 drivers/crypto/inside-secure/safexcel_ring.c int ring_id,
ring_id 124 drivers/crypto/inside-secure/safexcel_ring.c cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr);
ring_id 169 drivers/crypto/inside-secure/safexcel_ring.c int ring_id,
ring_id 175 drivers/crypto/inside-secure/safexcel_ring.c rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr);
ring_id 207 drivers/crypto/qat/qat_common/adf_transport_debug.c int ring_id = *((int *)v) - 1;
ring_id 208 drivers/crypto/qat/qat_common/adf_transport_debug.c struct adf_etr_ring_data *ring = &bank->rings[ring_id];
ring_id 212 drivers/crypto/qat/qat_common/adf_transport_debug.c if (!(bank->ring_mask & 1 << ring_id))
ring_id 1105 drivers/dma/xgene-dma.c u32 ring_id, val;
ring_id 1117 drivers/dma/xgene-dma.c ring_id = XGENE_DMA_RING_ID_SETUP(ring->id);
ring_id 1118 drivers/dma/xgene-dma.c iowrite32(ring_id, ring->pdma->csr_ring + XGENE_DMA_RING_ID);
ring_id 49 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.h unsigned ring_id;
ring_id 83 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __field(unsigned, ring_id)
ring_id 95 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->ring_id = iv->ring_id;
ring_id 109 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->ring_id, __entry->vmid,
ring_id 257 drivers/gpu/drm/amd/amdgpu/cik_ih.c entry->ring_id = dw[2] & 0xff;
ring_id 1163 drivers/gpu/drm/amd/amdgpu/cik_sdma.c instance_id = (entry->ring_id & 0x3) >> 0;
ring_id 1164 drivers/gpu/drm/amd/amdgpu/cik_sdma.c queue_id = (entry->ring_id & 0xc) >> 2;
ring_id 1205 drivers/gpu/drm/amd/amdgpu/cik_sdma.c instance_id = (entry->ring_id & 0x3) >> 0;
ring_id 236 drivers/gpu/drm/amd/amdgpu/cz_ih.c entry->ring_id = dw[2] & 0xff;
ring_id 1239 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c static int gfx_v10_0_gfx_ring_init(struct amdgpu_device *adev, int ring_id,
ring_id 1246 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ring = &adev->gfx.gfx_ring[ring_id];
ring_id 1255 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c if (!ring_id)
ring_id 1269 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c static int gfx_v10_0_compute_ring_init(struct amdgpu_device *adev, int ring_id,
ring_id 1274 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
ring_id 1276 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ring = &adev->gfx.compute_ring[ring_id];
ring_id 1285 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ring->doorbell_index = (adev->doorbell_index.mec_ring0 + ring_id) << 1;
ring_id 1287 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c + (ring_id * GFX10_MEC_HPD_SIZE);
ring_id 1305 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c int i, j, k, r, ring_id = 0;
ring_id 1383 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c r = gfx_v10_0_gfx_ring_init(adev, ring_id,
ring_id 1387 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ring_id++;
ring_id 1392 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ring_id = 0;
ring_id 1401 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c r = gfx_v10_0_compute_ring_init(adev, ring_id,
ring_id 1406 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ring_id++;
ring_id 4999 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 5000 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 5001 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 5069 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 5070 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 5071 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 5166 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 5167 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 5168 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 3372 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c switch (entry->ring_id) {
ring_id 3378 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c amdgpu_fence_process(&adev->gfx.compute_ring[entry->ring_id - 1]);
ring_id 3391 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c switch (entry->ring_id) {
ring_id 3397 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c ring = &adev->gfx.compute_ring[entry->ring_id - 1];
ring_id 3073 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c static int gfx_v7_0_compute_queue_init(struct amdgpu_device *adev, int ring_id)
ring_id 3078 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
ring_id 4398 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c static int gfx_v7_0_compute_ring_init(struct amdgpu_device *adev, int ring_id,
ring_id 4403 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
ring_id 4412 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c ring->doorbell_index = adev->doorbell_index.mec_ring0 + ring_id;
ring_id 4433 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c int i, j, k, r, ring_id;
ring_id 4499 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c ring_id = 0;
ring_id 4507 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c ring_id,
ring_id 4512 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c ring_id++;
ring_id 4872 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 4873 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 4897 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 4898 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 1914 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c static int gfx_v8_0_compute_ring_init(struct amdgpu_device *adev, int ring_id,
ring_id 1919 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
ring_id 1921 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c ring = &adev->gfx.compute_ring[ring_id];
ring_id 1930 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c ring->doorbell_index = adev->doorbell_index.mec_ring0 + ring_id;
ring_id 1932 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c + (ring_id * GFX8_MEC_HPD_SIZE);
ring_id 1953 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c int i, j, k, r, ring_id;
ring_id 2052 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c ring_id = 0;
ring_id 2060 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c ring_id,
ring_id 2065 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c ring_id++;
ring_id 6736 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 6737 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 6738 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 6766 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 6767 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 6768 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 2156 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c static int gfx_v9_0_compute_ring_init(struct amdgpu_device *adev, int ring_id,
ring_id 2161 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id];
ring_id 2163 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ring = &adev->gfx.compute_ring[ring_id];
ring_id 2172 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ring->doorbell_index = (adev->doorbell_index.mec_ring0 + ring_id) << 1;
ring_id 2174 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c + (ring_id * GFX9_MEC_HPD_SIZE);
ring_id 2193 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c int i, j, k, r, ring_id;
ring_id 2283 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ring_id = 0;
ring_id 2291 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ring_id,
ring_id 2296 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ring_id++;
ring_id 5671 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 5672 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 5673 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 5701 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c me_id = (entry->ring_id & 0x0c) >> 2;
ring_id 5702 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c pipe_id = (entry->ring_id & 0x03) >> 0;
ring_id 5703 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c queue_id = (entry->ring_id & 0x70) >> 4;
ring_id 161 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c entry->src_id, entry->ring_id, entry->vmid,
ring_id 381 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c entry->src_id, entry->ring_id, entry->vmid,
ring_id 236 drivers/gpu/drm/amd/amdgpu/iceland_ih.c entry->ring_id = dw[2] & 0xff;
ring_id 271 drivers/gpu/drm/amd/amdgpu/navi10_ih.c entry->ring_id = (dw[0] >> 16) & 0xff;
ring_id 1056 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c instance_id = (entry->ring_id & 0x3) >> 0;
ring_id 1057 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c queue_id = (entry->ring_id & 0xc) >> 2;
ring_id 1097 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c instance_id = (entry->ring_id & 0x3) >> 0;
ring_id 1098 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c queue_id = (entry->ring_id & 0xc) >> 2;
ring_id 1390 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c instance_id = (entry->ring_id & 0x3) >> 0;
ring_id 1391 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c queue_id = (entry->ring_id & 0xc) >> 2;
ring_id 1431 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c instance_id = (entry->ring_id & 0x3) >> 0;
ring_id 1432 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c queue_id = (entry->ring_id & 0xc) >> 2;
ring_id 2008 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c switch (entry->ring_id) {
ring_id 2086 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c switch (entry->ring_id) {
ring_id 1433 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c switch (entry->ring_id) {
ring_id 1449 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c switch (entry->ring_id) {
ring_id 139 drivers/gpu/drm/amd/amdgpu/si_ih.c entry->ring_id = dw[2] & 0xff;
ring_id 238 drivers/gpu/drm/amd/amdgpu/tonga_ih.c entry->ring_id = dw[2] & 0xff;
ring_id 454 drivers/gpu/drm/amd/amdgpu/vega10_ih.c entry->ring_id = (dw[0] >> 16) & 0xff;
ring_id 543 drivers/gpu/drm/amd/amdgpu/vega10_ih.c switch (entry->ring_id) {
ring_id 53 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c tmp_ihre->ring_id &= 0x000000ff;
ring_id 54 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c tmp_ihre->ring_id |= vmid << 8;
ring_id 55 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c tmp_ihre->ring_id |= pasid << 16;
ring_id 63 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c vmid = (ihre->ring_id & 0x0000ff00) >> 8;
ring_id 69 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c pasid = (ihre->ring_id & 0xffff0000) >> 16;
ring_id 90 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c unsigned int vmid = (ihre->ring_id & 0x0000ff00) >> 8;
ring_id 91 drivers/gpu/drm/amd/amdkfd/cik_event_interrupt.c unsigned int pasid = (ihre->ring_id & 0xffff0000) >> 16;
ring_id 31 drivers/gpu/drm/amd/amdkfd/cik_int.h uint32_t ring_id;
ring_id 111 drivers/gpu/drm/amd/amdkfd/kfd_int_process_v9.c uint16_t ring_id = SOC15_RING_ID_FROM_IH_ENTRY(ih_ring_entry);
ring_id 117 drivers/gpu/drm/amd/amdkfd/kfd_int_process_v9.c info.prot_valid = ring_id & 0x08;
ring_id 118 drivers/gpu/drm/amd/amdkfd/kfd_int_process_v9.c info.prot_read = ring_id & 0x10;
ring_id 119 drivers/gpu/drm/amd/amdkfd/kfd_int_process_v9.c info.prot_write = ring_id & 0x20;
ring_id 463 drivers/gpu/drm/i915/gvt/cmd_parser.c int ring_id;
ring_id 636 drivers/gpu/drm/i915/gvt/cmd_parser.c static inline u32 get_opcode(u32 cmd, int ring_id)
ring_id 640 drivers/gpu/drm/i915/gvt/cmd_parser.c d_info = ring_decode_info[ring_id][CMD_TYPE(cmd)];
ring_id 648 drivers/gpu/drm/i915/gvt/cmd_parser.c unsigned int opcode, int ring_id)
ring_id 653 drivers/gpu/drm/i915/gvt/cmd_parser.c if (opcode == e->info->opcode && e->info->rings & BIT(ring_id))
ring_id 660 drivers/gpu/drm/i915/gvt/cmd_parser.c u32 cmd, int ring_id)
ring_id 664 drivers/gpu/drm/i915/gvt/cmd_parser.c opcode = get_opcode(cmd, ring_id);
ring_id 668 drivers/gpu/drm/i915/gvt/cmd_parser.c return find_cmd_entry(gvt, opcode, ring_id);
ring_id 676 drivers/gpu/drm/i915/gvt/cmd_parser.c static inline void print_opcode(u32 cmd, int ring_id)
ring_id 681 drivers/gpu/drm/i915/gvt/cmd_parser.c d_info = ring_decode_info[ring_id][CMD_TYPE(cmd)];
ring_id 712 drivers/gpu/drm/i915/gvt/cmd_parser.c s->ring_id, s->ring_start, s->ring_start + s->ring_size,
ring_id 730 drivers/gpu/drm/i915/gvt/cmd_parser.c print_opcode(cmd_val(s, 0), s->ring_id);
ring_id 851 drivers/gpu/drm/i915/gvt/cmd_parser.c ring_base = dev_priv->engine[s->ring_id]->mmio_base;
ring_id 980 drivers/gpu/drm/i915/gvt/cmd_parser.c if (IS_BROADWELL(gvt->dev_priv) && s->ring_id != RCS0) {
ring_id 981 drivers/gpu/drm/i915/gvt/cmd_parser.c if (s->ring_id == BCS0 &&
ring_id 1142 drivers/gpu/drm/i915/gvt/cmd_parser.c hws_pga = s->vgpu->hws_pga[s->ring_id];
ring_id 1156 drivers/gpu/drm/i915/gvt/cmd_parser.c set_bit(cmd_interrupt_events[s->ring_id].pipe_control_notify,
ring_id 1163 drivers/gpu/drm/i915/gvt/cmd_parser.c set_bit(cmd_interrupt_events[s->ring_id].mi_user_interrupt,
ring_id 1668 drivers/gpu/drm/i915/gvt/cmd_parser.c hws_pga = s->vgpu->hws_pga[s->ring_id];
ring_id 1677 drivers/gpu/drm/i915/gvt/cmd_parser.c set_bit(cmd_interrupt_events[s->ring_id].mi_flush_dw,
ring_id 1727 drivers/gpu/drm/i915/gvt/cmd_parser.c !(s->vgpu->scan_nonprivbb & (1 << s->ring_id)))
ring_id 1754 drivers/gpu/drm/i915/gvt/cmd_parser.c info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
ring_id 1757 drivers/gpu/drm/i915/gvt/cmd_parser.c cmd, get_opcode(cmd, s->ring_id),
ring_id 1759 drivers/gpu/drm/i915/gvt/cmd_parser.c "ppgtt" : "ggtt", s->ring_id, s->workload);
ring_id 1766 drivers/gpu/drm/i915/gvt/cmd_parser.c info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
ring_id 1769 drivers/gpu/drm/i915/gvt/cmd_parser.c cmd, get_opcode(cmd, s->ring_id),
ring_id 1771 drivers/gpu/drm/i915/gvt/cmd_parser.c "ppgtt" : "ggtt", s->ring_id, s->workload);
ring_id 1800 drivers/gpu/drm/i915/gvt/cmd_parser.c info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
ring_id 1803 drivers/gpu/drm/i915/gvt/cmd_parser.c cmd, get_opcode(cmd, s->ring_id),
ring_id 1805 drivers/gpu/drm/i915/gvt/cmd_parser.c "ppgtt" : "ggtt", s->ring_id, s->workload);
ring_id 2667 drivers/gpu/drm/i915/gvt/cmd_parser.c info = get_cmd_info(s->vgpu->gvt, cmd, s->ring_id);
ring_id 2671 drivers/gpu/drm/i915/gvt/cmd_parser.c cmd, get_opcode(cmd, s->ring_id),
ring_id 2673 drivers/gpu/drm/i915/gvt/cmd_parser.c "ppgtt" : "ggtt", s->ring_id, s->workload);
ring_id 2679 drivers/gpu/drm/i915/gvt/cmd_parser.c trace_gvt_command(vgpu->id, s->ring_id, s->ip_gma, s->ip_va,
ring_id 2782 drivers/gpu/drm/i915/gvt/cmd_parser.c s.ring_id = workload->ring_id;
ring_id 2791 drivers/gpu/drm/i915/gvt/cmd_parser.c if ((bypass_scan_mask & (1 << workload->ring_id)) ||
ring_id 2831 drivers/gpu/drm/i915/gvt/cmd_parser.c s.ring_id = workload->ring_id;
ring_id 2856 drivers/gpu/drm/i915/gvt/cmd_parser.c int ring_id = workload->ring_id;
ring_id 2869 drivers/gpu/drm/i915/gvt/cmd_parser.c if (workload->rb_len > s->ring_scan_buffer_size[ring_id]) {
ring_id 2873 drivers/gpu/drm/i915/gvt/cmd_parser.c p = krealloc(s->ring_scan_buffer[ring_id], workload->rb_len,
ring_id 2879 drivers/gpu/drm/i915/gvt/cmd_parser.c s->ring_scan_buffer[ring_id] = p;
ring_id 2880 drivers/gpu/drm/i915/gvt/cmd_parser.c s->ring_scan_buffer_size[ring_id] = workload->rb_len;
ring_id 2883 drivers/gpu/drm/i915/gvt/cmd_parser.c shadow_ring_buffer_va = s->ring_scan_buffer[ring_id];
ring_id 42 drivers/gpu/drm/i915/gvt/execlist.c #define execlist_ring_mmio(gvt, ring_id, offset) \
ring_id 43 drivers/gpu/drm/i915/gvt/execlist.c (gvt->dev_priv->engine[ring_id]->mmio_base + (offset))
ring_id 57 drivers/gpu/drm/i915/gvt/execlist.c static int ring_id_to_context_switch_event(unsigned int ring_id)
ring_id 59 drivers/gpu/drm/i915/gvt/execlist.c if (WARN_ON(ring_id >= ARRAY_SIZE(context_switch_events)))
ring_id 62 drivers/gpu/drm/i915/gvt/execlist.c return context_switch_events[ring_id];
ring_id 96 drivers/gpu/drm/i915/gvt/execlist.c int ring_id = execlist->ring_id;
ring_id 98 drivers/gpu/drm/i915/gvt/execlist.c ring_id, _EL_OFFSET_STATUS);
ring_id 131 drivers/gpu/drm/i915/gvt/execlist.c int ring_id = execlist->ring_id;
ring_id 138 drivers/gpu/drm/i915/gvt/execlist.c ctx_status_ptr_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
ring_id 140 drivers/gpu/drm/i915/gvt/execlist.c ctx_status_buf_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
ring_id 164 drivers/gpu/drm/i915/gvt/execlist.c vgpu->hws_pga[ring_id]);
ring_id 183 drivers/gpu/drm/i915/gvt/execlist.c ring_id_to_context_switch_event(execlist->ring_id));
ring_id 264 drivers/gpu/drm/i915/gvt/execlist.c int ring_id = execlist->ring_id;
ring_id 265 drivers/gpu/drm/i915/gvt/execlist.c u32 status_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
ring_id 382 drivers/gpu/drm/i915/gvt/execlist.c int ring_id = workload->ring_id;
ring_id 391 drivers/gpu/drm/i915/gvt/execlist.c ret = emulate_execlist_schedule_in(&s->execlist[ring_id], ctx);
ring_id 402 drivers/gpu/drm/i915/gvt/execlist.c int ring_id = workload->ring_id;
ring_id 404 drivers/gpu/drm/i915/gvt/execlist.c struct intel_vgpu_execlist *execlist = &s->execlist[ring_id];
ring_id 406 drivers/gpu/drm/i915/gvt/execlist.c struct list_head *next = workload_q_head(vgpu, ring_id)->next;
ring_id 413 drivers/gpu/drm/i915/gvt/execlist.c if (workload->status || (vgpu->resetting_eng & BIT(ring_id)))
ring_id 416 drivers/gpu/drm/i915/gvt/execlist.c if (!list_empty(workload_q_head(vgpu, ring_id))) {
ring_id 439 drivers/gpu/drm/i915/gvt/execlist.c static int submit_context(struct intel_vgpu *vgpu, int ring_id,
ring_id 446 drivers/gpu/drm/i915/gvt/execlist.c workload = intel_vgpu_create_workload(vgpu, ring_id, desc);
ring_id 455 drivers/gpu/drm/i915/gvt/execlist.c workload->elsp_dwords = s->execlist[ring_id].elsp_dwords;
ring_id 464 drivers/gpu/drm/i915/gvt/execlist.c int intel_vgpu_submit_execlist(struct intel_vgpu *vgpu, int ring_id)
ring_id 467 drivers/gpu/drm/i915/gvt/execlist.c struct intel_vgpu_execlist *execlist = &s->execlist[ring_id];
ring_id 492 drivers/gpu/drm/i915/gvt/execlist.c ret = submit_context(vgpu, ring_id, desc[i], i == 0);
ring_id 507 drivers/gpu/drm/i915/gvt/execlist.c static void init_vgpu_execlist(struct intel_vgpu *vgpu, int ring_id)
ring_id 510 drivers/gpu/drm/i915/gvt/execlist.c struct intel_vgpu_execlist *execlist = &s->execlist[ring_id];
ring_id 517 drivers/gpu/drm/i915/gvt/execlist.c execlist->ring_id = ring_id;
ring_id 521 drivers/gpu/drm/i915/gvt/execlist.c ctx_status_ptr_reg = execlist_ring_mmio(vgpu->gvt, ring_id,
ring_id 171 drivers/gpu/drm/i915/gvt/execlist.h int ring_id;
ring_id 180 drivers/gpu/drm/i915/gvt/execlist.h int intel_vgpu_submit_execlist(struct intel_vgpu *vgpu, int ring_id);
ring_id 512 drivers/gpu/drm/i915/gvt/handlers.c int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
ring_id 517 drivers/gpu/drm/i915/gvt/handlers.c if ((bytes != 4) || ((offset & (bytes - 1)) != 0) || ring_id < 0) {
ring_id 519 drivers/gpu/drm/i915/gvt/handlers.c vgpu->id, ring_id, offset, bytes);
ring_id 523 drivers/gpu/drm/i915/gvt/handlers.c ring_base = dev_priv->engine[ring_id]->mmio_base;
ring_id 1473 drivers/gpu/drm/i915/gvt/handlers.c int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
ring_id 1485 drivers/gpu/drm/i915/gvt/handlers.c if (unlikely(ring_id < 0 || ring_id >= I915_NUM_ENGINES)) {
ring_id 1490 drivers/gpu/drm/i915/gvt/handlers.c vgpu->hws_pga[ring_id] = value;
ring_id 1640 drivers/gpu/drm/i915/gvt/handlers.c int ring_id;
ring_id 1643 drivers/gpu/drm/i915/gvt/handlers.c ring_id = intel_gvt_render_mmio_to_ring_id(gvt, offset);
ring_id 1650 drivers/gpu/drm/i915/gvt/handlers.c if (ring_id >= 0)
ring_id 1651 drivers/gpu/drm/i915/gvt/handlers.c ring_base = dev_priv->engine[ring_id]->mmio_base;
ring_id 1653 drivers/gpu/drm/i915/gvt/handlers.c if (ring_id < 0 || vgpu == gvt->scheduler.engine_owner[ring_id] ||
ring_id 1667 drivers/gpu/drm/i915/gvt/handlers.c int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
ring_id 1672 drivers/gpu/drm/i915/gvt/handlers.c if (WARN_ON(ring_id < 0 || ring_id >= I915_NUM_ENGINES))
ring_id 1675 drivers/gpu/drm/i915/gvt/handlers.c execlist = &vgpu->submission.execlist[ring_id];
ring_id 1679 drivers/gpu/drm/i915/gvt/handlers.c ret = intel_vgpu_submit_execlist(vgpu, ring_id);
ring_id 1682 drivers/gpu/drm/i915/gvt/handlers.c ring_id);
ring_id 1694 drivers/gpu/drm/i915/gvt/handlers.c int ring_id = intel_gvt_render_mmio_to_ring_id(vgpu->gvt, offset);
ring_id 1730 drivers/gpu/drm/i915/gvt/handlers.c ring_id);
ring_id 1736 drivers/gpu/drm/i915/gvt/handlers.c BIT(ring_id),
ring_id 229 drivers/gpu/drm/i915/gvt/interrupt.h int gvt_ring_id_to_pipe_control_notify_event(int ring_id);
ring_id 230 drivers/gpu/drm/i915/gvt/interrupt.h int gvt_ring_id_to_mi_flush_dw_event(int ring_id);
ring_id 231 drivers/gpu/drm/i915/gvt/interrupt.h int gvt_ring_id_to_mi_user_interrupt_event(int ring_id);
ring_id 165 drivers/gpu/drm/i915/gvt/mmio_context.c int ring_id, i;
ring_id 171 drivers/gpu/drm/i915/gvt/mmio_context.c for (ring_id = 0; ring_id < cnt; ring_id++) {
ring_id 172 drivers/gpu/drm/i915/gvt/mmio_context.c if (!HAS_ENGINE(dev_priv, ring_id))
ring_id 174 drivers/gpu/drm/i915/gvt/mmio_context.c offset.reg = regs[ring_id];
ring_id 176 drivers/gpu/drm/i915/gvt/mmio_context.c gen9_render_mocs.control_table[ring_id][i] =
ring_id 199 drivers/gpu/drm/i915/gvt/mmio_context.c int ring_id = req->engine->id;
ring_id 200 drivers/gpu/drm/i915/gvt/mmio_context.c int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id];
ring_id 216 drivers/gpu/drm/i915/gvt/mmio_context.c if (mmio->ring_id != ring_id ||
ring_id 224 drivers/gpu/drm/i915/gvt/mmio_context.c *(cs-2), *(cs-1), vgpu->id, ring_id);
ring_id 346 drivers/gpu/drm/i915/gvt/mmio_context.c static void handle_tlb_pending_event(struct intel_vgpu *vgpu, int ring_id)
ring_id 359 drivers/gpu/drm/i915/gvt/mmio_context.c if (WARN_ON(ring_id >= cnt))
ring_id 362 drivers/gpu/drm/i915/gvt/mmio_context.c if (!test_and_clear_bit(ring_id, (void *)s->tlb_handle_pending))
ring_id 365 drivers/gpu/drm/i915/gvt/mmio_context.c reg = _MMIO(regs[ring_id]);
ring_id 374 drivers/gpu/drm/i915/gvt/mmio_context.c if (ring_id == RCS0 && INTEL_GEN(dev_priv) >= 9)
ring_id 382 drivers/gpu/drm/i915/gvt/mmio_context.c gvt_vgpu_err("timeout in invalidate ring (%d) tlb\n", ring_id);
ring_id 388 drivers/gpu/drm/i915/gvt/mmio_context.c gvt_dbg_core("invalidate TLB for ring %d\n", ring_id);
ring_id 392 drivers/gpu/drm/i915/gvt/mmio_context.c int ring_id)
ring_id 408 drivers/gpu/drm/i915/gvt/mmio_context.c if (WARN_ON(ring_id >= ARRAY_SIZE(regs)))
ring_id 411 drivers/gpu/drm/i915/gvt/mmio_context.c if (ring_id == RCS0 && IS_GEN(dev_priv, 9))
ring_id 417 drivers/gpu/drm/i915/gvt/mmio_context.c offset.reg = regs[ring_id];
ring_id 422 drivers/gpu/drm/i915/gvt/mmio_context.c old_v = gen9_render_mocs.control_table[ring_id][i];
ring_id 426 drivers/gpu/drm/i915/gvt/mmio_context.c new_v = gen9_render_mocs.control_table[ring_id][i];
ring_id 434 drivers/gpu/drm/i915/gvt/mmio_context.c if (ring_id == RCS0) {
ring_id 469 drivers/gpu/drm/i915/gvt/mmio_context.c int ring_id)
ring_id 478 drivers/gpu/drm/i915/gvt/mmio_context.c switch_mocs(pre, next, ring_id);
ring_id 482 drivers/gpu/drm/i915/gvt/mmio_context.c if (mmio->ring_id != ring_id)
ring_id 511 drivers/gpu/drm/i915/gvt/mmio_context.c !is_inhibit_context(s->shadow[ring_id]))
ring_id 538 drivers/gpu/drm/i915/gvt/mmio_context.c handle_tlb_pending_event(next, ring_id);
ring_id 551 drivers/gpu/drm/i915/gvt/mmio_context.c struct intel_vgpu *next, int ring_id)
ring_id 558 drivers/gpu/drm/i915/gvt/mmio_context.c gvt_dbg_render("switch ring %d from %s to %s\n", ring_id,
ring_id 569 drivers/gpu/drm/i915/gvt/mmio_context.c switch_mmio(pre, next, ring_id);
ring_id 597 drivers/gpu/drm/i915/gvt/mmio_context.c gvt->engine_mmio_list.ctx_mmio_count[mmio->ring_id]++;
ring_id 40 drivers/gpu/drm/i915/gvt/mmio_context.h int ring_id;
ring_id 48 drivers/gpu/drm/i915/gvt/mmio_context.h struct intel_vgpu *next, int ring_id);
ring_id 447 drivers/gpu/drm/i915/gvt/sched_policy.c int ring_id;
ring_id 470 drivers/gpu/drm/i915/gvt/sched_policy.c for (ring_id = 0; ring_id < I915_NUM_ENGINES; ring_id++) {
ring_id 471 drivers/gpu/drm/i915/gvt/sched_policy.c if (scheduler->engine_owner[ring_id] == vgpu) {
ring_id 472 drivers/gpu/drm/i915/gvt/sched_policy.c intel_gvt_switch_mmio(vgpu, NULL, ring_id);
ring_id 473 drivers/gpu/drm/i915/gvt/sched_policy.c scheduler->engine_owner[ring_id] = NULL;
ring_id 100 drivers/gpu/drm/i915/gvt/scheduler.c if (workload->ring_id != RCS0)
ring_id 130 drivers/gpu/drm/i915/gvt/scheduler.c int ring_id = workload->ring_id;
ring_id 156 drivers/gpu/drm/i915/gvt/scheduler.c if (ring_id == RCS0) {
ring_id 177 drivers/gpu/drm/i915/gvt/scheduler.c gvt_dbg_sched("ring id %d workload lrca %x", ring_id,
ring_id 180 drivers/gpu/drm/i915/gvt/scheduler.c context_page_num = gvt->dev_priv->engine[ring_id]->context_size;
ring_id 184 drivers/gpu/drm/i915/gvt/scheduler.c if (IS_BROADWELL(gvt->dev_priv) && ring_id == RCS0)
ring_id 212 drivers/gpu/drm/i915/gvt/scheduler.c static void save_ring_hw_state(struct intel_vgpu *vgpu, int ring_id)
ring_id 215 drivers/gpu/drm/i915/gvt/scheduler.c u32 ring_base = dev_priv->engine[ring_id]->mmio_base;
ring_id 233 drivers/gpu/drm/i915/gvt/scheduler.c enum intel_engine_id ring_id = req->engine->id;
ring_id 240 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->engine_owner[ring_id]) {
ring_id 242 drivers/gpu/drm/i915/gvt/scheduler.c intel_gvt_switch_mmio(scheduler->engine_owner[ring_id],
ring_id 243 drivers/gpu/drm/i915/gvt/scheduler.c NULL, ring_id);
ring_id 244 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->engine_owner[ring_id] = NULL;
ring_id 251 drivers/gpu/drm/i915/gvt/scheduler.c workload = scheduler->current_workload[ring_id];
ring_id 258 drivers/gpu/drm/i915/gvt/scheduler.c if (workload->vgpu != scheduler->engine_owner[ring_id]) {
ring_id 260 drivers/gpu/drm/i915/gvt/scheduler.c intel_gvt_switch_mmio(scheduler->engine_owner[ring_id],
ring_id 261 drivers/gpu/drm/i915/gvt/scheduler.c workload->vgpu, ring_id);
ring_id 262 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->engine_owner[ring_id] = workload->vgpu;
ring_id 265 drivers/gpu/drm/i915/gvt/scheduler.c ring_id, workload->vgpu->id);
ring_id 270 drivers/gpu/drm/i915/gvt/scheduler.c save_ring_hw_state(workload->vgpu, ring_id);
ring_id 274 drivers/gpu/drm/i915/gvt/scheduler.c save_ring_hw_state(workload->vgpu, ring_id);
ring_id 400 drivers/gpu/drm/i915/gvt/scheduler.c rq = i915_request_create(s->shadow[workload->ring_id]);
ring_id 430 drivers/gpu/drm/i915/gvt/scheduler.c if (!test_and_set_bit(workload->ring_id, s->shadow_ctx_desc_updated))
ring_id 431 drivers/gpu/drm/i915/gvt/scheduler.c shadow_context_descriptor_update(s->shadow[workload->ring_id],
ring_id 438 drivers/gpu/drm/i915/gvt/scheduler.c if (workload->ring_id == RCS0 && workload->wa_ctx.indirect_ctx.size) {
ring_id 581 drivers/gpu/drm/i915/gvt/scheduler.c ring_base = dev_priv->engine[workload->ring_id]->mmio_base;
ring_id 624 drivers/gpu/drm/i915/gvt/scheduler.c int ring = workload->ring_id;
ring_id 694 drivers/gpu/drm/i915/gvt/scheduler.c int ring_id = workload->ring_id;
ring_id 698 drivers/gpu/drm/i915/gvt/scheduler.c ring_id, workload);
ring_id 729 drivers/gpu/drm/i915/gvt/scheduler.c ring_id, workload->req);
ring_id 742 drivers/gpu/drm/i915/gvt/scheduler.c struct intel_gvt *gvt, int ring_id)
ring_id 754 drivers/gpu/drm/i915/gvt/scheduler.c gvt_dbg_sched("ring id %d stop - no current vgpu\n", ring_id);
ring_id 759 drivers/gpu/drm/i915/gvt/scheduler.c gvt_dbg_sched("ring id %d stop - will reschedule\n", ring_id);
ring_id 764 drivers/gpu/drm/i915/gvt/scheduler.c list_empty(workload_q_head(scheduler->current_vgpu, ring_id)))
ring_id 771 drivers/gpu/drm/i915/gvt/scheduler.c if (scheduler->current_workload[ring_id]) {
ring_id 772 drivers/gpu/drm/i915/gvt/scheduler.c workload = scheduler->current_workload[ring_id];
ring_id 774 drivers/gpu/drm/i915/gvt/scheduler.c ring_id, workload);
ring_id 784 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->current_workload[ring_id] = container_of(
ring_id 785 drivers/gpu/drm/i915/gvt/scheduler.c workload_q_head(scheduler->current_vgpu, ring_id)->next,
ring_id 788 drivers/gpu/drm/i915/gvt/scheduler.c workload = scheduler->current_workload[ring_id];
ring_id 790 drivers/gpu/drm/i915/gvt/scheduler.c gvt_dbg_sched("ring id %d pick new workload %p\n", ring_id, workload);
ring_id 830 drivers/gpu/drm/i915/gvt/scheduler.c ring_base = dev_priv->engine[workload->ring_id]->mmio_base;
ring_id 904 drivers/gpu/drm/i915/gvt/scheduler.c static void complete_current_workload(struct intel_gvt *gvt, int ring_id)
ring_id 908 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->current_workload[ring_id];
ring_id 938 drivers/gpu/drm/i915/gvt/scheduler.c !(vgpu->resetting_eng & BIT(ring_id))) {
ring_id 950 drivers/gpu/drm/i915/gvt/scheduler.c ring_id, workload, workload->status);
ring_id 952 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->current_workload[ring_id] = NULL;
ring_id 956 drivers/gpu/drm/i915/gvt/scheduler.c if (workload->status || vgpu->resetting_eng & BIT(ring_id)) {
ring_id 970 drivers/gpu/drm/i915/gvt/scheduler.c intel_vgpu_clean_workloads(vgpu, BIT(ring_id));
ring_id 987 drivers/gpu/drm/i915/gvt/scheduler.c int ring_id;
ring_id 994 drivers/gpu/drm/i915/gvt/scheduler.c int ring_id = p->ring_id;
ring_id 1005 drivers/gpu/drm/i915/gvt/scheduler.c gvt_dbg_core("workload thread for ring %d started\n", ring_id);
ring_id 1008 drivers/gpu/drm/i915/gvt/scheduler.c add_wait_queue(&scheduler->waitq[ring_id], &wait);
ring_id 1010 drivers/gpu/drm/i915/gvt/scheduler.c workload = pick_next_workload(gvt, ring_id);
ring_id 1016 drivers/gpu/drm/i915/gvt/scheduler.c remove_wait_queue(&scheduler->waitq[ring_id], &wait);
ring_id 1022 drivers/gpu/drm/i915/gvt/scheduler.c workload->ring_id, workload,
ring_id 1028 drivers/gpu/drm/i915/gvt/scheduler.c workload->ring_id, workload);
ring_id 1050 drivers/gpu/drm/i915/gvt/scheduler.c workload->ring_id, workload);
ring_id 1057 drivers/gpu/drm/i915/gvt/scheduler.c complete_current_workload(gvt, ring_id);
ring_id 1122 drivers/gpu/drm/i915/gvt/scheduler.c param->ring_id = i;
ring_id 1476 drivers/gpu/drm/i915/gvt/scheduler.c intel_vgpu_create_workload(struct intel_vgpu *vgpu, int ring_id,
ring_id 1480 drivers/gpu/drm/i915/gvt/scheduler.c struct list_head *q = workload_q_head(vgpu, ring_id);
ring_id 1511 drivers/gpu/drm/i915/gvt/scheduler.c ring_id);
ring_id 1523 drivers/gpu/drm/i915/gvt/scheduler.c gvt_dbg_el("ring id %d begin a new workload\n", ring_id);
ring_id 1543 drivers/gpu/drm/i915/gvt/scheduler.c workload->ring_id = ring_id;
ring_id 1552 drivers/gpu/drm/i915/gvt/scheduler.c if (ring_id == RCS0) {
ring_id 1591 drivers/gpu/drm/i915/gvt/scheduler.c workload, ring_id, head, tail, start, ctl);
ring_id 1602 drivers/gpu/drm/i915/gvt/scheduler.c if (list_empty(workload_q_head(vgpu, ring_id))) {
ring_id 1627 drivers/gpu/drm/i915/gvt/scheduler.c workload_q_head(workload->vgpu, workload->ring_id));
ring_id 1629 drivers/gpu/drm/i915/gvt/scheduler.c wake_up(&workload->vgpu->gvt->scheduler.waitq[workload->ring_id]);
ring_id 82 drivers/gpu/drm/i915/gvt/scheduler.h int ring_id;
ring_id 132 drivers/gpu/drm/i915/gvt/scheduler.h #define workload_q_head(vgpu, ring_id) \
ring_id 133 drivers/gpu/drm/i915/gvt/scheduler.h (&(vgpu->submission.workload_q_head[ring_id]))
ring_id 158 drivers/gpu/drm/i915/gvt/scheduler.h intel_vgpu_create_workload(struct intel_vgpu *vgpu, int ring_id,
ring_id 116 drivers/gpu/drm/i915/gvt/trace.h TP_PROTO(int id, char *type, int ring_id, int root_entry_type,
ring_id 119 drivers/gpu/drm/i915/gvt/trace.h TP_ARGS(id, type, ring_id, root_entry_type, gma, gpa),
ring_id 128 drivers/gpu/drm/i915/gvt/trace.h id, type, ring_id, root_entry_type, gma, gpa);
ring_id 229 drivers/gpu/drm/i915/gvt/trace.h TP_PROTO(u8 vgpu_id, u8 ring_id, u32 ip_gma, u32 *cmd_va,
ring_id 233 drivers/gpu/drm/i915/gvt/trace.h TP_ARGS(vgpu_id, ring_id, ip_gma, cmd_va, cmd_len, buf_type,
ring_id 238 drivers/gpu/drm/i915/gvt/trace.h __field(u8, ring_id)
ring_id 250 drivers/gpu/drm/i915/gvt/trace.h __entry->ring_id = ring_id;
ring_id 263 drivers/gpu/drm/i915/gvt/trace.h __entry->ring_id,
ring_id 7559 drivers/gpu/drm/radeon/cik.c u32 src_id, src_data, ring_id;
ring_id 7593 drivers/gpu/drm/radeon/cik.c ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
ring_id 7946 drivers/gpu/drm/radeon/cik.c me_id = (ring_id & 0x60) >> 5;
ring_id 7947 drivers/gpu/drm/radeon/cik.c pipe_id = (ring_id & 0x18) >> 3;
ring_id 7948 drivers/gpu/drm/radeon/cik.c queue_id = (ring_id & 0x7) >> 0;
ring_id 7965 drivers/gpu/drm/radeon/cik.c me_id = (ring_id & 0x60) >> 5;
ring_id 7966 drivers/gpu/drm/radeon/cik.c pipe_id = (ring_id & 0x18) >> 3;
ring_id 7967 drivers/gpu/drm/radeon/cik.c queue_id = (ring_id & 0x7) >> 0;
ring_id 7988 drivers/gpu/drm/radeon/cik.c me_id = (ring_id & 0x60) >> 5;
ring_id 7989 drivers/gpu/drm/radeon/cik.c pipe_id = (ring_id & 0x18) >> 3;
ring_id 7990 drivers/gpu/drm/radeon/cik.c queue_id = (ring_id & 0x7) >> 0;
ring_id 8010 drivers/gpu/drm/radeon/cik.c me_id = (ring_id & 0x3) >> 0;
ring_id 8011 drivers/gpu/drm/radeon/cik.c queue_id = (ring_id & 0xc) >> 2;
ring_id 8059 drivers/gpu/drm/radeon/cik.c me_id = (ring_id & 0x3) >> 0;
ring_id 8060 drivers/gpu/drm/radeon/cik.c queue_id = (ring_id & 0xc) >> 2;
ring_id 6252 drivers/gpu/drm/radeon/si.c u32 src_id, src_data, ring_id;
ring_id 6284 drivers/gpu/drm/radeon/si.c ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
ring_id 6396 drivers/gpu/drm/radeon/si.c switch (ring_id) {
ring_id 1377 drivers/infiniband/hw/bnxt_re/ib_verbs.c srq->qplib_srq.eventq_hw_ring_id = rdev->nq[0].ring_id;
ring_id 2595 drivers/infiniband/hw/bnxt_re/ib_verbs.c cq->qplib_cq.cnq_hw_ring_id = nq->ring_id;
ring_id 388 drivers/infiniband/hw/bnxt_re/main.c req.ring_id = cpu_to_le16(fw_ring_id);
ring_id 394 drivers/infiniband/hw/bnxt_re/main.c "Failed to free HW ring:%d :%#x", req.ring_id, rc);
ring_id 430 drivers/infiniband/hw/bnxt_re/main.c *fw_ring_id = le16_to_cpu(resp.ring_id);
ring_id 951 drivers/infiniband/hw/bnxt_re/main.c bnxt_re_net_ring_free(rdev, rdev->nq[i].ring_id, type);
ring_id 1014 drivers/infiniband/hw/bnxt_re/main.c &rdev->nq[i].ring_id);
ring_id 1028 drivers/infiniband/hw/bnxt_re/main.c bnxt_re_net_ring_free(rdev, rdev->nq[i].ring_id, type);
ring_id 318 drivers/infiniband/hw/bnxt_re/qplib_fp.c hwq->max_elements, nq->ring_id,
ring_id 347 drivers/infiniband/hw/bnxt_re/qplib_fp.c nq->hwq.max_elements, nq->ring_id, gen_p5);
ring_id 410 drivers/infiniband/hw/bnxt_re/qplib_fp.c nq->hwq.max_elements, nq->ring_id, gen_p5);
ring_id 486 drivers/infiniband/hw/bnxt_re/qplib_fp.h u16 ring_id;
ring_id 127 drivers/net/ethernet/apm/xgene/xgene_enet_hw.c u32 ring_id;
ring_id 129 drivers/net/ethernet/apm/xgene/xgene_enet_hw.c ring_id = ring->id | OVERWRITE;
ring_id 130 drivers/net/ethernet/apm/xgene/xgene_enet_hw.c xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id);
ring_id 1212 drivers/net/ethernet/apm/xgene/xgene_enet_main.c enum xgene_enet_ring_cfgsize cfgsize, u32 ring_id)
ring_id 1232 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring->id = ring_id;
ring_id 1313 drivers/net/ethernet/apm/xgene/xgene_enet_main.c u16 ring_id, slots;
ring_id 1323 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id = xgene_enet_get_ring_id(RING_OWNER_CPU, cpu_bufnum++);
ring_id 1326 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id);
ring_id 1334 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id = xgene_enet_get_ring_id(owner, bp_bufnum++);
ring_id 1337 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id);
ring_id 1365 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id = xgene_enet_get_ring_id(owner, bp_bufnum++);
ring_id 1368 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id);
ring_id 1398 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id = xgene_enet_get_ring_id(owner, eth_bufnum++);
ring_id 1401 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id);
ring_id 1422 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id = xgene_enet_get_ring_id(RING_OWNER_CPU,
ring_id 1426 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id);
ring_id 1813 drivers/net/ethernet/apm/xgene/xgene_enet_main.c u16 dst_ring_num, ring_id;
ring_id 1866 drivers/net/ethernet/apm/xgene/xgene_enet_main.c ring_id = (page_pool) ? page_pool->id : 0;
ring_id 1868 drivers/net/ethernet/apm/xgene/xgene_enet_main.c buf_pool->id, ring_id);
ring_id 120 drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c u32 ring_id;
ring_id 122 drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c ring_id = ring->id | OVERWRITE;
ring_id 123 drivers/net/ethernet/apm/xgene/xgene_enet_ring2.c xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id);
ring_id 4894 drivers/net/ethernet/broadcom/bnxt/bnxt.c u16 ring_id;
ring_id 4896 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring_id = rxr->rx_ring_struct.fw_ring_id;
ring_id 4897 drivers/net/ethernet/broadcom/bnxt/bnxt.c *ring_tbl++ = cpu_to_le16(ring_id);
ring_id 4898 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring_id = bnxt_cp_ring_for_rx(bp, rxr);
ring_id 4899 drivers/net/ethernet/broadcom/bnxt/bnxt.c *ring_tbl++ = cpu_to_le16(ring_id);
ring_id 5226 drivers/net/ethernet/broadcom/bnxt/bnxt.c u16 ring_id;
ring_id 5320 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring_id = le16_to_cpu(resp->ring_id);
ring_id 5328 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring->fw_ring_id = ring_id;
ring_id 5534 drivers/net/ethernet/broadcom/bnxt/bnxt.c req.ring_id = cpu_to_le16(ring->fw_ring_id);
ring_id 6211 drivers/net/ethernet/broadcom/bnxt/bnxt.c req.ring_id = cpu_to_le16(cpr->cp_ring_struct.fw_ring_id);
ring_id 6244 drivers/net/ethernet/broadcom/bnxt/bnxt.c req_rx.ring_id = cpu_to_le16(bnxt_cp_ring_for_rx(bp, bnapi->rx_ring));
ring_id 6268 drivers/net/ethernet/broadcom/bnxt/bnxt.c u16 ring_id;
ring_id 6272 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring_id = bnxt_cp_ring_for_tx(bp, bnapi->tx_ring);
ring_id 6275 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring_id = bnxt_cp_ring_for_rx(bp, bnapi->rx_ring);
ring_id 6277 drivers/net/ethernet/broadcom/bnxt/bnxt.c req->ring_id = cpu_to_le16(ring_id);
ring_id 6289 drivers/net/ethernet/broadcom/bnxt/bnxt.c ring_id = bnxt_cp_ring_for_tx(bp, bnapi->tx_ring);
ring_id 6290 drivers/net/ethernet/broadcom/bnxt/bnxt.c req->ring_id = cpu_to_le16(ring_id);
ring_id 7647 drivers/net/ethernet/broadcom/bnxt/bnxt.c u16 ring_id = i;
ring_id 7656 drivers/net/ethernet/broadcom/bnxt/bnxt.c rc = bnxt_hwrm_vnic_alloc(bp, vnic_id, ring_id, 1);
ring_id 9864 drivers/net/ethernet/broadcom/bnxt/bnxt.c u32 ring_id, u32 *prod, u32 *cons)
ring_id 9872 drivers/net/ethernet/broadcom/bnxt/bnxt.c req.fw_ring_id = cpu_to_le32(ring_id);
ring_id 5142 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h __le16 ring_id;
ring_id 5164 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h __le16 ring_id;
ring_id 5192 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h __le16 ring_id;
ring_id 5260 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h __le16 ring_id;
ring_id 5291 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h __le16 ring_id;
ring_id 494 drivers/net/ethernet/intel/i40e/i40e_debugfs.c static void i40e_dbg_dump_desc(int cnt, int vsi_seid, int ring_id, int desc_n,
ring_id 508 drivers/net/ethernet/intel/i40e/i40e_debugfs.c if (ring_id >= vsi->num_queue_pairs || ring_id < 0) {
ring_id 509 drivers/net/ethernet/intel/i40e/i40e_debugfs.c dev_info(&pf->pdev->dev, "ring %d not found\n", ring_id);
ring_id 520 drivers/net/ethernet/intel/i40e/i40e_debugfs.c ? vsi->rx_rings[ring_id] : vsi->tx_rings[ring_id],
ring_id 527 drivers/net/ethernet/intel/i40e/i40e_debugfs.c vsi_seid, is_rx_ring ? "rx" : "tx", ring_id);
ring_id 554 drivers/net/ethernet/intel/i40e/i40e_debugfs.c vsi_seid, ring_id, desc_n,
ring_id 560 drivers/net/ethernet/intel/i40e/i40e_debugfs.c vsi_seid, ring_id, desc_n,
ring_id 918 drivers/net/ethernet/intel/i40e/i40e_debugfs.c int ring_id, desc_n;
ring_id 921 drivers/net/ethernet/intel/i40e/i40e_debugfs.c &vsi_seid, &ring_id, &desc_n);
ring_id 922 drivers/net/ethernet/intel/i40e/i40e_debugfs.c i40e_dbg_dump_desc(cnt, vsi_seid, ring_id,
ring_id 927 drivers/net/ethernet/intel/i40e/i40e_debugfs.c &vsi_seid, &ring_id, &desc_n);
ring_id 928 drivers/net/ethernet/intel/i40e/i40e_debugfs.c i40e_dbg_dump_desc(cnt, vsi_seid, ring_id,
ring_id 1619 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h struct qlcnic_host_rds_ring *rds_ring, u8 ring_id);
ring_id 143 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c u16 handle, u8 ring_id)
ring_id 146 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c return handle | (ring_id << 15);
ring_id 846 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c u8 ring_id)
ring_id 872 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c buffer->ref_handle, ring_id);
ring_id 1438 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c struct qlcnic_host_rds_ring *rds_ring, u8 ring_id)
ring_id 1465 drivers/net/ethernet/qlogic/qlcnic/qlcnic_io.c ring_id);
ring_id 1233 drivers/net/ethernet/socionext/netsec.c static int netsec_alloc_dring(struct netsec_priv *priv, enum ring_id id)
ring_id 102 drivers/net/wireless/ath/wil6210/debugfs.c int ring_id = ring->is_rx ?
ring_id 108 drivers/net/wireless/ath/wil6210/debugfs.c x = wmi_addr(wil, RGF_DMA_SCM_SUBQ_CONS + 4 * (ring_id / 2));
ring_id 111 drivers/net/wireless/ath/wil6210/debugfs.c v = (ring_id % 2 ? (v >> 16) : (v & 0xffff));
ring_id 271 drivers/net/wireless/ath/wil6210/trace.h __field(u8, ring_id)
ring_id 279 drivers/net/wireless/ath/wil6210/trace.h __entry->ring_id = msg->ring_id;
ring_id 285 drivers/net/wireless/ath/wil6210/trace.h __entry->ring_id, __entry->index, __entry->len,
ring_id 1268 drivers/net/wireless/ath/wil6210/txrx.c static int wil_tx_vring_modify(struct wil6210_vif *vif, int ring_id, int cid,
ring_id 1281 drivers/net/wireless/ath/wil6210/txrx.c .ringid = ring_id,
ring_id 1299 drivers/net/wireless/ath/wil6210/txrx.c struct wil_ring *vring = &wil->ring_tx[ring_id];
ring_id 1300 drivers/net/wireless/ath/wil6210/txrx.c struct wil_ring_tx_data *txdata = &wil->ring_tx_data[ring_id];
ring_id 1302 drivers/net/wireless/ath/wil6210/txrx.c wil_dbg_misc(wil, "vring_modify: ring %d cid %d tid %d\n", ring_id,
ring_id 1307 drivers/net/wireless/ath/wil6210/txrx.c wil_err(wil, "Tx ring [%d] not allocated\n", ring_id);
ring_id 1311 drivers/net/wireless/ath/wil6210/txrx.c if (wil->ring2cid_tid[ring_id][0] != cid ||
ring_id 1312 drivers/net/wireless/ath/wil6210/txrx.c wil->ring2cid_tid[ring_id][1] != tid) {
ring_id 1314 drivers/net/wireless/ath/wil6210/txrx.c wil->ring2cid_tid[ring_id][0],
ring_id 1315 drivers/net/wireless/ath/wil6210/txrx.c wil->ring2cid_tid[ring_id][1]);
ring_id 1338 drivers/net/wireless/ath/wil6210/txrx.c wil_addba_tx_request(wil, ring_id, agg_wsize);
ring_id 1346 drivers/net/wireless/ath/wil6210/txrx.c wil->ring2cid_tid[ring_id][0] = wil->max_assoc_sta;
ring_id 1347 drivers/net/wireless/ath/wil6210/txrx.c wil->ring2cid_tid[ring_id][1] = 0;
ring_id 119 drivers/net/wireless/ath/wil6210/txrx_edma.c int ring_id = wil_find_free_sring(wil);
ring_id 131 drivers/net/wireless/ath/wil6210/txrx_edma.c status_ring_size, ring_id);
ring_id 133 drivers/net/wireless/ath/wil6210/txrx_edma.c if (ring_id < 0)
ring_id 134 drivers/net/wireless/ath/wil6210/txrx_edma.c return ring_id;
ring_id 139 drivers/net/wireless/ath/wil6210/txrx_edma.c sring = &wil->srings[ring_id];
ring_id 148 drivers/net/wireless/ath/wil6210/txrx_edma.c rc = wil_wmi_tx_sring_cfg(wil, ring_id);
ring_id 153 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->tx_sring_idx = ring_id;
ring_id 357 drivers/net/wireless/ath/wil6210/txrx_edma.c u16 ring_id)
ring_id 359 drivers/net/wireless/ath/wil6210/txrx_edma.c struct wil_status_ring *sring = &wil->srings[ring_id];
ring_id 363 drivers/net/wireless/ath/wil6210/txrx_edma.c status_ring_size, ring_id);
ring_id 374 drivers/net/wireless/ath/wil6210/txrx_edma.c rc = wil_wmi_rx_sring_add(wil, ring_id);
ring_id 709 drivers/net/wireless/ath/wil6210/txrx_edma.c static int wil_ring_init_tx_edma(struct wil6210_vif *vif, int ring_id,
ring_id 714 drivers/net/wireless/ath/wil6210/txrx_edma.c struct wil_ring *ring = &wil->ring_tx[ring_id];
ring_id 715 drivers/net/wireless/ath/wil6210/txrx_edma.c struct wil_ring_tx_data *txdata = &wil->ring_tx_data[ring_id];
ring_id 721 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id, cid, tid, wil->tx_sring_idx);
ring_id 729 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->ring2cid_tid[ring_id][0] = cid;
ring_id 730 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->ring2cid_tid[ring_id][1] = tid;
ring_id 734 drivers/net/wireless/ath/wil6210/txrx_edma.c rc = wil_wmi_tx_desc_ring_add(vif, ring_id, cid, tid);
ring_id 741 drivers/net/wireless/ath/wil6210/txrx_edma.c wil_addba_tx_request(wil, ring_id, agg_wsize);
ring_id 750 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->ring2cid_tid[ring_id][0] = wil->max_assoc_sta;
ring_id 751 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->ring2cid_tid[ring_id][1] = 0;
ring_id 757 drivers/net/wireless/ath/wil6210/txrx_edma.c static int wil_tx_ring_modify_edma(struct wil6210_vif *vif, int ring_id,
ring_id 1172 drivers/net/wireless/ath/wil6210/txrx_edma.c unsigned int ring_id;
ring_id 1192 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id = msg.ring_id;
ring_id 1194 drivers/net/wireless/ath/wil6210/txrx_edma.c if (unlikely(ring_id >= WIL6210_MAX_TX_RINGS)) {
ring_id 1195 drivers/net/wireless/ath/wil6210/txrx_edma.c wil_err(wil, "invalid ring id %d\n", ring_id);
ring_id 1198 drivers/net/wireless/ath/wil6210/txrx_edma.c ring = &wil->ring_tx[ring_id];
ring_id 1201 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id);
ring_id 1204 drivers/net/wireless/ath/wil6210/txrx_edma.c txdata = &wil->ring_tx_data[ring_id];
ring_id 1206 drivers/net/wireless/ath/wil6210/txrx_edma.c wil_info(wil, "Tx irq[%d]: ring disabled\n", ring_id);
ring_id 1212 drivers/net/wireless/ath/wil6210/txrx_edma.c txdata->mid, ring_id);
ring_id 1218 drivers/net/wireless/ath/wil6210/txrx_edma.c cid = wil->ring2cid_tid[ring_id][0];
ring_id 1224 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id, num_descs);
ring_id 1242 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id, ring->swtail, dmalen,
ring_id 1292 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id, used_before_complete, used_new);
ring_id 1549 drivers/net/wireless/ath/wil6210/txrx_edma.c static int wil_ring_init_bcast_edma(struct wil6210_vif *vif, int ring_id,
ring_id 1553 drivers/net/wireless/ath/wil6210/txrx_edma.c struct wil_ring *ring = &wil->ring_tx[ring_id];
ring_id 1555 drivers/net/wireless/ath/wil6210/txrx_edma.c struct wil_ring_tx_data *txdata = &wil->ring_tx_data[ring_id];
ring_id 1558 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_id, wil->tx_sring_idx);
ring_id 1569 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->ring2cid_tid[ring_id][0] = WIL6210_MAX_CID; /* CID */
ring_id 1570 drivers/net/wireless/ath/wil6210/txrx_edma.c wil->ring2cid_tid[ring_id][1] = 0; /* TID */
ring_id 1574 drivers/net/wireless/ath/wil6210/txrx_edma.c rc = wil_wmi_bcast_desc_ring_add(vif, ring_id);
ring_id 238 drivers/net/wireless/ath/wil6210/txrx_edma.h u8 ring_id;
ring_id 605 drivers/net/wireless/ath/wil6210/wil6210.h int (*ring_init_tx)(struct wil6210_vif *vif, int ring_id,
ring_id 618 drivers/net/wireless/ath/wil6210/wil6210.h int (*tx_ring_modify)(struct wil6210_vif *vif, int ring_id,
ring_id 1439 drivers/net/wireless/ath/wil6210/wil6210.h int wil_wmi_tx_sring_cfg(struct wil6210_priv *wil, int ring_id);
ring_id 1442 drivers/net/wireless/ath/wil6210/wil6210.h int wil_wmi_rx_sring_add(struct wil6210_priv *wil, u16 ring_id);
ring_id 1444 drivers/net/wireless/ath/wil6210/wil6210.h int wil_wmi_tx_desc_ring_add(struct wil6210_vif *vif, int ring_id, int cid,
ring_id 1446 drivers/net/wireless/ath/wil6210/wil6210.h int wil_wmi_bcast_desc_ring_add(struct wil6210_vif *vif, int ring_id);
ring_id 2777 drivers/net/wireless/ath/wil6210/wmi.c .ring_id = ringid,
ring_id 2792 drivers/net/wireless/ath/wil6210/wmi.c .ring_id = ringid,
ring_id 3701 drivers/net/wireless/ath/wil6210/wmi.c int wil_wmi_tx_sring_cfg(struct wil6210_priv *wil, int ring_id)
ring_id 3705 drivers/net/wireless/ath/wil6210/wmi.c struct wil_status_ring *sring = &wil->srings[ring_id];
ring_id 3719 drivers/net/wireless/ath/wil6210/wmi.c cmd.ring_cfg.ring_id = ring_id;
ring_id 3777 drivers/net/wireless/ath/wil6210/wmi.c int wil_wmi_rx_sring_add(struct wil6210_priv *wil, u16 ring_id)
ring_id 3781 drivers/net/wireless/ath/wil6210/wmi.c struct wil_status_ring *sring = &wil->srings[ring_id];
ring_id 3786 drivers/net/wireless/ath/wil6210/wmi.c .ring_id = ring_id,
ring_id 3829 drivers/net/wireless/ath/wil6210/wmi.c .ring_id = WIL_RX_DESC_RING_ID,
ring_id 3862 drivers/net/wireless/ath/wil6210/wmi.c int wil_wmi_tx_desc_ring_add(struct wil6210_vif *vif, int ring_id, int cid,
ring_id 3868 drivers/net/wireless/ath/wil6210/wmi.c struct wil_ring *ring = &wil->ring_tx[ring_id];
ring_id 3869 drivers/net/wireless/ath/wil6210/wmi.c struct wil_ring_tx_data *txdata = &wil->ring_tx_data[ring_id];
ring_id 3873 drivers/net/wireless/ath/wil6210/wmi.c .ring_id = ring_id,
ring_id 3916 drivers/net/wireless/ath/wil6210/wmi.c int wil_wmi_bcast_desc_ring_add(struct wil6210_vif *vif, int ring_id)
ring_id 3919 drivers/net/wireless/ath/wil6210/wmi.c struct wil_ring *ring = &wil->ring_tx[ring_id];
ring_id 3924 drivers/net/wireless/ath/wil6210/wmi.c .ring_id = ring_id,
ring_id 3936 drivers/net/wireless/ath/wil6210/wmi.c struct wil_ring_tx_data *txdata = &wil->ring_tx_data[ring_id];
ring_id 1000 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 1099 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 1108 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 2570 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 2579 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 2595 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 2604 drivers/net/wireless/ath/wil6210/wmi.h u8 ring_id;
ring_id 1034 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c brcmf_pcie_alloc_dma_and_ring(struct brcmf_pciedev_info *devinfo, u32 ring_id,
ring_id 1049 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c size = brcmf_ring_max_item[ring_id] * ring_itemsize_array[ring_id];
ring_id 1057 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c brcmf_pcie_write_tcm16(devinfo, addr, brcmf_ring_max_item[ring_id]);
ring_id 1059 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c brcmf_pcie_write_tcm16(devinfo, addr, ring_itemsize_array[ring_id]);
ring_id 1067 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c brcmf_commonring_config(&ring->commonring, brcmf_ring_max_item[ring_id],
ring_id 1068 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c ring_itemsize_array[ring_id], dma_buf);
ring_id 1714 drivers/scsi/be2iscsi/be_main.c u32 ring_id, doorbell = 0;
ring_id 1723 drivers/scsi/be2iscsi/be_main.c ring_id = phwi_ctrlr->default_pdu_hdr[ulp_num].id;
ring_id 1729 drivers/scsi/be2iscsi/be_main.c ring_id = phwi_ctrlr->default_pdu_data[ulp_num].id;
ring_id 1756 drivers/scsi/be2iscsi/be_main.c doorbell |= ring_id & DB_DEF_PDU_RING_ID_MASK;
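The recurring mask/shift lines in the amdgpu entries above all decode the same packed interrupt-handler field: the IH code stores the low byte of the IV ring-buffer dword in entry->ring_id, and the per-block handlers unpack it. As a minimal illustrative sketch (not code from the kernel tree; the helper names are hypothetical, while the masks and shifts are copied verbatim from the gfx_v8_0.c/gfx_v9_0.c/gfx_v10_0.c and cik_sdma.c/sdma_v2_4.c lines listed above):

/*
 * Hypothetical helpers, for illustration only. Masks/shifts are taken
 * verbatim from the amdgpu entries in this listing.
 */
#include <linux/types.h>

/* amdgpu GFX IH entry: pipe in bits 1:0, me in bits 3:2, queue in bits 6:4 */
static inline void gfx_decode_ring_id(u32 ring_id, u32 *me_id,
				      u32 *pipe_id, u32 *queue_id)
{
	*me_id    = (ring_id & 0x0c) >> 2;
	*pipe_id  = (ring_id & 0x03) >> 0;
	*queue_id = (ring_id & 0x70) >> 4;
}

/* amdgpu SDMA IH entry: instance in bits 1:0, queue in bits 3:2 */
static inline void sdma_decode_ring_id(u32 ring_id, u32 *instance_id,
				       u32 *queue_id)
{
	*instance_id = (ring_id & 0x3) >> 0;
	*queue_id    = (ring_id & 0xc) >> 2;
}

The radeon cik.c entries use a wider layout for the same idea (me in bits 6:5, pipe in 4:3, queue in 2:0), and the amdkfd cik_event_interrupt.c entries repack vmid and pasid into bits 15:8 and 31:16 of the same dword.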