Lines matching refs: ring in drivers/gpu/drm/radeon/si.c
Each entry gives the source line number, the matching statement, and the enclosing function; statements wrapped across several source lines show only the line that contains the match, so some packet arguments appear truncated.
3374 struct radeon_ring *ring = &rdev->ring[fence->ring]; in si_fence_ring_emit() local
3375 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in si_fence_ring_emit()
3378 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1)); in si_fence_ring_emit()
3379 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2); in si_fence_ring_emit()
3380 radeon_ring_write(ring, 0); in si_fence_ring_emit()
3381 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3)); in si_fence_ring_emit()
3382 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA | in si_fence_ring_emit()
3386 radeon_ring_write(ring, 0xFFFFFFFF); in si_fence_ring_emit()
3387 radeon_ring_write(ring, 0); in si_fence_ring_emit()
3388 radeon_ring_write(ring, 10); /* poll interval */ in si_fence_ring_emit()
3390 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4)); in si_fence_ring_emit()
3391 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5)); in si_fence_ring_emit()
3392 radeon_ring_write(ring, lower_32_bits(addr)); in si_fence_ring_emit()
3393 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2)); in si_fence_ring_emit()
3394 radeon_ring_write(ring, fence->seq); in si_fence_ring_emit()
3395 radeon_ring_write(ring, 0); in si_fence_ring_emit()
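
The block above is the SI fence emit: a SURFACE_SYNC cache flush followed by an EVENT_WRITE_EOP that writes fence->seq to the fence's GPU address and raises an interrupt. A minimal sketch of that end-of-pipe write, using only the packet macros visible above and omitting the SURFACE_SYNC flags that the search output truncates (sketch, not the driver's exact code):

/* Sketch: end-of-pipe fence write on a SI CP ring. */
static void sketch_emit_eop_fence(struct radeon_ring *ring, u64 addr, u32 seq)
{
	radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
	radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
	radeon_ring_write(ring, lower_32_bits(addr));          /* ADDRESS_LO */
	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | /* ADDRESS_HI */
			  DATA_SEL(1) |                        /* write 32-bit data */
			  INT_SEL(2));                         /* interrupt on write confirm */
	radeon_ring_write(ring, seq);                          /* DATA_LO = fence value */
	radeon_ring_write(ring, 0);                            /* DATA_HI */
}

The host side then only has to compare the value stored at addr with the last emitted sequence number to see which fences have signalled.
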
3403 struct radeon_ring *ring = &rdev->ring[ib->ring]; in si_ring_ib_execute() local
3404 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in si_ring_ib_execute()
3409 radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0)); in si_ring_ib_execute()
3410 radeon_ring_write(ring, 0); in si_ring_ib_execute()
3415 if (ring->rptr_save_reg) { in si_ring_ib_execute()
3416 next_rptr = ring->wptr + 3 + 4 + 8; in si_ring_ib_execute()
3417 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1)); in si_ring_ib_execute()
3418 radeon_ring_write(ring, ((ring->rptr_save_reg - in si_ring_ib_execute()
3420 radeon_ring_write(ring, next_rptr); in si_ring_ib_execute()
3422 next_rptr = ring->wptr + 5 + 4 + 8; in si_ring_ib_execute()
3423 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3)); in si_ring_ib_execute()
3424 radeon_ring_write(ring, (1 << 8)); in si_ring_ib_execute()
3425 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc); in si_ring_ib_execute()
3426 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr)); in si_ring_ib_execute()
3427 radeon_ring_write(ring, next_rptr); in si_ring_ib_execute()
3433 radeon_ring_write(ring, header); in si_ring_ib_execute()
3434 radeon_ring_write(ring, in si_ring_ib_execute()
3439 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF); in si_ring_ib_execute()
3440 radeon_ring_write(ring, ib->length_dw | (vm_id << 24)); in si_ring_ib_execute()
3444 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1)); in si_ring_ib_execute()
3445 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2); in si_ring_ib_execute()
3446 radeon_ring_write(ring, vm_id); in si_ring_ib_execute()
3447 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3)); in si_ring_ib_execute()
3448 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA | in si_ring_ib_execute()
3452 radeon_ring_write(ring, 0xFFFFFFFF); in si_ring_ib_execute()
3453 radeon_ring_write(ring, 0); in si_ring_ib_execute()
3454 radeon_ring_write(ring, 10); /* poll interval */ in si_ring_ib_execute()
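
si_ring_ib_execute() is what the core ring code invokes when an indirect buffer is committed to a CP ring. The caller-side pattern, as used by the driver's IB self-tests, is to grab an IB, fill its command stream, and schedule it. A rough usage sketch, assuming the shared radeon_ib_get()/radeon_ib_schedule()/radeon_ib_free() helpers and a scratch-register write as an illustrative payload:

/* Sketch: filling and scheduling a small IB on the GFX ring
 * (assumed generic IB helpers; error handling trimmed). */
static int sketch_run_small_ib(struct radeon_device *rdev, u32 scratch, u32 value)
{
	struct radeon_ib ib;
	int r;

	r = radeon_ib_get(rdev, RADEON_RING_TYPE_GFX_INDEX, &ib, NULL, 256);
	if (r)
		return r;

	/* payload: one SET_CONFIG_REG write into a scratch register */
	ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
	ib.ptr[1] = (scratch - PACKET3_SET_CONFIG_REG_START) >> 2;
	ib.ptr[2] = value;
	ib.length_dw = 3;

	r = radeon_ib_schedule(rdev, &ib, NULL, false); /* lands in si_ring_ib_execute() */
	if (!r)
		r = radeon_fence_wait(ib.fence, false);  /* wait for the IB's fence */
	radeon_ib_free(rdev, &ib);
	return r;
}
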
3470 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_enable()
3471 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_enable()
3472 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_enable()
3560 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_start() local
3563 r = radeon_ring_lock(rdev, ring, 7 + 4); in si_cp_start()
3569 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5)); in si_cp_start()
3570 radeon_ring_write(ring, 0x1); in si_cp_start()
3571 radeon_ring_write(ring, 0x0); in si_cp_start()
3572 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1); in si_cp_start()
3573 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1)); in si_cp_start()
3574 radeon_ring_write(ring, 0); in si_cp_start()
3575 radeon_ring_write(ring, 0); in si_cp_start()
3578 radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2)); in si_cp_start()
3579 radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE)); in si_cp_start()
3580 radeon_ring_write(ring, 0xc000); in si_cp_start()
3581 radeon_ring_write(ring, 0xe000); in si_cp_start()
3582 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3586 r = radeon_ring_lock(rdev, ring, si_default_size + 10); in si_cp_start()
3593 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0)); in si_cp_start()
3594 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE); in si_cp_start()
3597 radeon_ring_write(ring, si_default_state[i]); in si_cp_start()
3599 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0)); in si_cp_start()
3600 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE); in si_cp_start()
3603 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0)); in si_cp_start()
3604 radeon_ring_write(ring, 0); in si_cp_start()
3606 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2)); in si_cp_start()
3607 radeon_ring_write(ring, 0x00000316); in si_cp_start()
3608 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */ in si_cp_start()
3609 radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */ in si_cp_start()
3611 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3614 ring = &rdev->ring[i]; in si_cp_start()
3615 r = radeon_ring_lock(rdev, ring, 2); in si_cp_start()
3618 radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0)); in si_cp_start()
3619 radeon_ring_write(ring, 0); in si_cp_start()
3621 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
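
si_cp_start() shows the standard reserve/fill/commit discipline for the CP rings: radeon_ring_lock() reserves the requested number of dwords and holds the ring, the radeon_ring_write() calls fill exactly that many, and radeon_ring_unlock_commit() advances the write pointer so the CP fetches the new packets. A condensed sketch of the pattern, using the CLEAR_STATE packet already shown above as the payload:

/* Sketch: the reserve/fill/commit pattern used throughout si_cp_start(). */
static int sketch_emit_clear_state(struct radeon_device *rdev, struct radeon_ring *ring)
{
	int r;

	r = radeon_ring_lock(rdev, ring, 2);	/* reserve 2 dwords */
	if (r) {
		DRM_ERROR("radeon: ring failed to lock (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);
	/* 'false': no HDP flush requested for this submission */
	radeon_ring_unlock_commit(rdev, ring, false);
	return 0;
}
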
3629 struct radeon_ring *ring; in si_cp_fini() local
3632 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_fini()
3633 radeon_ring_fini(rdev, ring); in si_cp_fini()
3634 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3636 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_fini()
3637 radeon_ring_fini(rdev, ring); in si_cp_fini()
3638 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3640 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_fini()
3641 radeon_ring_fini(rdev, ring); in si_cp_fini()
3642 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
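
si_cp_fini() tears the three CP rings down with the same two calls per ring. A hypothetical table-driven version of the same teardown (sketch only; the driver spells the three cases out, and also disables the CP first):

/* Hypothetical consolidation of the si_cp_fini() body shown above. */
static const int sketch_cp_rings[] = {
	RADEON_RING_TYPE_GFX_INDEX,
	CAYMAN_RING_TYPE_CP1_INDEX,
	CAYMAN_RING_TYPE_CP2_INDEX,
};

static void sketch_cp_fini(struct radeon_device *rdev)
{
	unsigned i;

	for (i = 0; i < ARRAY_SIZE(sketch_cp_rings); i++) {
		struct radeon_ring *ring = &rdev->ring[sketch_cp_rings[i]];

		radeon_ring_fini(rdev, ring);			/* free the ring buffer */
		radeon_scratch_free(rdev, ring->rptr_save_reg);	/* release the rptr-save scratch reg */
	}
}
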
3647 struct radeon_ring *ring; in si_cp_resume() local
3665 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_resume()
3666 rb_bufsz = order_base_2(ring->ring_size / 8); in si_cp_resume()
3675 ring->wptr = 0; in si_cp_resume()
3676 WREG32(CP_RB0_WPTR, ring->wptr); in si_cp_resume()
3692 WREG32(CP_RB0_BASE, ring->gpu_addr >> 8); in si_cp_resume()
3696 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_resume()
3697 rb_bufsz = order_base_2(ring->ring_size / 8); in si_cp_resume()
3706 ring->wptr = 0; in si_cp_resume()
3707 WREG32(CP_RB1_WPTR, ring->wptr); in si_cp_resume()
3716 WREG32(CP_RB1_BASE, ring->gpu_addr >> 8); in si_cp_resume()
3720 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_resume()
3721 rb_bufsz = order_base_2(ring->ring_size / 8); in si_cp_resume()
3730 ring->wptr = 0; in si_cp_resume()
3731 WREG32(CP_RB2_WPTR, ring->wptr); in si_cp_resume()
3740 WREG32(CP_RB2_BASE, ring->gpu_addr >> 8); in si_cp_resume()
3744 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in si_cp_resume()
3745 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true; in si_cp_resume()
3746 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true; in si_cp_resume()
3747 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in si_cp_resume()
3749 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_resume()
3750 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3751 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3754 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); in si_cp_resume()
3756 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3758 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); in si_cp_resume()
3760 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
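
In si_cp_resume(), rb_bufsz = order_base_2(ring->ring_size / 8) converts the ring size in bytes into the log2 size field programmed into the CP_RBn_CNTL registers, which appears to be expressed in 8-byte units. For the ring sizes configured in si_init() below, the arithmetic works out as in this small sketch, assuming only order_base_2() from <linux/log2.h>:

/* Sketch: what order_base_2(ring_size / 8) evaluates to for the ring sizes
 * set up in si_init() (1 MiB CP rings, 64 KiB DMA rings). */
#include <linux/kernel.h>
#include <linux/log2.h>

static void sketch_rb_bufsz(void)
{
	unsigned gfx = order_base_2((1024 * 1024) / 8);	/* 2^17 units -> 17 */
	unsigned dma = order_base_2((64 * 1024) / 8);	/* 2^13 units -> 13 */

	pr_info("rb_bufsz: gfx=%u dma=%u\n", gfx, dma);
}
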
4119 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in si_gfx_is_lockup() argument
4126 radeon_ring_lockup_update(rdev, ring); in si_gfx_is_lockup()
4129 return radeon_ring_test_lockup(rdev, ring); in si_gfx_is_lockup()
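
si_gfx_is_lockup() follows the common radeon lockup-detection idiom: if the engine-specific status check says the block is idle, refresh the ring's lockup bookkeeping and report no lockup; otherwise let the generic helper decide whether the read pointer has stalled for too long. A sketch of that shape, with the SI soft-reset status check abstracted behind an engine_busy flag that is not a real driver symbol:

/* Sketch of the is_lockup shape; 'engine_busy' stands in for the SI
 * soft-reset/busy status check and is not a real driver function. */
static bool sketch_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring,
			     bool engine_busy)
{
	if (!engine_busy) {
		/* engine idle: remember the current rptr and timestamp, no lockup */
		radeon_ring_lockup_update(rdev, ring);
		return false;
	}
	/* engine busy: lockup if the rptr has not moved for too long */
	return radeon_ring_test_lockup(rdev, ring);
}
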
4747 switch (ib->ring) { in si_ib_parse()
4756 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring); in si_ib_parse()
5063 void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in si_vm_flush() argument
5067 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3)); in si_vm_flush()
5068 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) | in si_vm_flush()
5072 radeon_ring_write(ring, in si_vm_flush()
5075 radeon_ring_write(ring, in si_vm_flush()
5078 radeon_ring_write(ring, 0); in si_vm_flush()
5079 radeon_ring_write(ring, pd_addr >> 12); in si_vm_flush()
5082 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3)); in si_vm_flush()
5083 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) | in si_vm_flush()
5085 radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2); in si_vm_flush()
5086 radeon_ring_write(ring, 0); in si_vm_flush()
5087 radeon_ring_write(ring, 0x1); in si_vm_flush()
5090 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3)); in si_vm_flush()
5091 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(1) | in si_vm_flush()
5093 radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2); in si_vm_flush()
5094 radeon_ring_write(ring, 0); in si_vm_flush()
5095 radeon_ring_write(ring, 1 << vm_id); in si_vm_flush()
5098 radeon_ring_write(ring, PACKET3(PACKET3_WAIT_REG_MEM, 5)); in si_vm_flush()
5099 radeon_ring_write(ring, (WAIT_REG_MEM_FUNCTION(0) | /* always */ in si_vm_flush()
5101 radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2); in si_vm_flush()
5102 radeon_ring_write(ring, 0); in si_vm_flush()
5103 radeon_ring_write(ring, 0); /* ref */ in si_vm_flush()
5104 radeon_ring_write(ring, 0); /* mask */ in si_vm_flush()
5105 radeon_ring_write(ring, 0x20); /* poll interval */ in si_vm_flush()
5108 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0)); in si_vm_flush()
5109 radeon_ring_write(ring, 0x0); in si_vm_flush()
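
si_vm_flush() uses CP WRITE_DATA packets to point the VM context's page-directory base register at pd_addr >> 12, poke the HDP memory-coherency flush register, and write a one-hot vm_id into VM_INVALIDATE_REQUEST; the trailing WAIT_REG_MEM poll and PFP_SYNC_ME keep later command fetches from racing ahead of the TLB invalidate. The sketch below only illustrates the one-hot invalidate write; the destination-select flag truncated in the lines above is assumed to be the register destination:

/* Sketch: requesting a TLB flush for one VM id via a CP register write
 * (WRITE_DATA_DST_SEL(0), "destination is a register", is assumed here
 *  because the search output truncates that flag above). */
static void sketch_vm_invalidate(struct radeon_ring *ring, unsigned vm_id)
{
	radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
	radeon_ring_write(ring, WRITE_DATA_ENGINE_SEL(1) |	/* use the PFP engine */
				WRITE_DATA_DST_SEL(0));		/* write a register */
	radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);	/* register dword offset */
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 1 << vm_id);			/* one-hot: flush this VM only */
}
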
6464 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; in si_irq_process()
6465 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; in si_irq_process()
6466 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; in si_irq_process()
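
These si_irq_process() lines decode one interrupt-handler (IH) ring entry: entries are four little-endian dwords, and src_id, src_data, and ring_id are masked out of the first three. A sketch of the surrounding fetch loop, assuming the conventional 16-byte advance and ih.ptr_mask wraparound used by the radeon IH code (the wptr read and acknowledge steps of the real handler are omitted):

/* Sketch: walking IH ring entries (rptr is a byte offset; 16 bytes per entry). */
static void sketch_ih_walk(struct radeon_device *rdev, u32 rptr, u32 wptr)
{
	while (rptr != wptr) {
		u32 ring_index = rptr / 4;	/* dword index into the IH ring */
		u32 src_id   = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		u32 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
		u32 ring_id  = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;

		/* dispatch on the decoded fields here */
		pr_debug("IH: src_id=%u src_data=0x%x ring_id=%u\n",
			 src_id, src_data, ring_id);

		rptr += 16;			/* next 4-dword entry */
		rptr &= rdev->ih.ptr_mask;	/* wrap at the end of the ring */
	}
}
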
6873 struct radeon_ring *ring; in si_startup() local
6958 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in si_startup()
6973 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in si_startup()
6974 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in si_startup()
6992 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_startup()
6993 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in si_startup()
6998 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_startup()
6999 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, in si_startup()
7004 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_startup()
7005 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, in si_startup()
7010 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_startup()
7011 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in si_startup()
7016 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_startup()
7017 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in si_startup()
7034 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_startup()
7035 if (ring->ring_size) { in si_startup()
7036 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
7047 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in si_startup()
7048 if (ring->ring_size) in si_startup()
7049 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
7052 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in si_startup()
7053 if (ring->ring_size) in si_startup()
7054 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
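
si_startup() brings each ring up with radeon_ring_init(), passing the ring's size, its writeback rptr offset, and a NOP/padding packet that the search output truncates; every call is followed by the same error check. A sketch of that pattern for the GFX ring, with the NOP value left as a labeled placeholder since it is elided above:

/* Sketch: the init-and-check pattern repeated for each ring in si_startup().
 * SKETCH_CP_NOP stands in for the ring's real NOP/padding packet. */
#define SKETCH_CP_NOP 0 /* placeholder, not the driver's real value */

static int sketch_bringup_gfx_ring(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r;

	r = radeon_ring_init(rdev, ring, ring->ring_size,
			     RADEON_WB_CP_RPTR_OFFSET,	/* writeback slot for the rptr */
			     SKETCH_CP_NOP);		/* filler packet for unused dwords */
	if (r) {
		dev_err(rdev->dev, "failed initializing CP ring (%d).\n", r);
		return r;
	}
	return 0;
}
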
7138 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init() local
7199 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
7200 ring->ring_obj = NULL; in si_init()
7201 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7203 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_init()
7204 ring->ring_obj = NULL; in si_init()
7205 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7207 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_init()
7208 ring->ring_obj = NULL; in si_init()
7209 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7211 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_init()
7212 ring->ring_obj = NULL; in si_init()
7213 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
7215 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_init()
7216 ring->ring_obj = NULL; in si_init()
7217 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
7222 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_init()
7223 ring->ring_obj = NULL; in si_init()
7224 r600_ring_init(rdev, ring, 4096); in si_init()
7230 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in si_init()
7231 ring->ring_obj = NULL; in si_init()
7232 r600_ring_init(rdev, ring, 4096); in si_init()
7234 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in si_init()
7235 ring->ring_obj = NULL; in si_init()
7236 r600_ring_init(rdev, ring, 4096); in si_init()
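
si_init() pre-sizes every ring before si_startup() runs: 1 MiB for the three CP rings, 64 KiB for the two DMA rings, and 4 KiB for the UVD/VCE rings, each after clearing ring_obj. A hypothetical table-driven version of the same setup (sketch only; the driver writes the calls out longhand, and the UVD/VCE entries are conditional on firmware setup in the real code):

/* Hypothetical consolidation of the r600_ring_init() calls in si_init(). */
static const struct { int idx; unsigned size; } sketch_ring_sizes[] = {
	{ RADEON_RING_TYPE_GFX_INDEX,	1024 * 1024 },
	{ CAYMAN_RING_TYPE_CP1_INDEX,	1024 * 1024 },
	{ CAYMAN_RING_TYPE_CP2_INDEX,	1024 * 1024 },
	{ R600_RING_TYPE_DMA_INDEX,	64 * 1024 },
	{ CAYMAN_RING_TYPE_DMA1_INDEX,	64 * 1024 },
	{ R600_RING_TYPE_UVD_INDEX,	4096 },
	{ TN_RING_TYPE_VCE1_INDEX,	4096 },
	{ TN_RING_TYPE_VCE2_INDEX,	4096 },
};

static void sketch_size_rings(struct radeon_device *rdev)
{
	unsigned i;

	for (i = 0; i < ARRAY_SIZE(sketch_ring_sizes); i++) {
		struct radeon_ring *ring = &rdev->ring[sketch_ring_sizes[i].idx];

		ring->ring_obj = NULL;
		r600_ring_init(rdev, ring, sketch_ring_sizes[i].size);
	}
}
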