Lines matching refs:ring (symbol cross-reference results, apparently over drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c from a kernel of the struct-fence era). Each entry lists the source line number, the matched line, the enclosing function, and how "ring" is used there: function argument or local variable.

61 static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq)  in amdgpu_fence_write()  argument
63 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_write()
77 static u32 amdgpu_fence_read(struct amdgpu_ring *ring) in amdgpu_fence_read() argument
79 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_read()
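The two helpers above appear only by their opening lines. Reconstructed around the matched fragments, with the unmatched bodies filled in as an approximation of the kernel source of this vintage, they mirror the 32-bit sequence number through the ring's CPU-visible fence slot:

static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq)
{
	struct amdgpu_fence_driver *drv = &ring->fence_drv;

	/* publish the sequence number in the writeback slot, if one is mapped */
	if (drv->cpu_addr)
		*drv->cpu_addr = cpu_to_le32(seq);
}

static u32 amdgpu_fence_read(struct amdgpu_ring *ring)
{
	struct amdgpu_fence_driver *drv = &ring->fence_drv;
	u32 seq = 0;

	/* read what the GPU last wrote; fall back to the cached 64-bit value */
	if (drv->cpu_addr)
		seq = le32_to_cpu(*drv->cpu_addr);
	else
		seq = lower_32_bits(atomic64_read(&drv->last_seq));

	return seq;
}

Only the low 32 bits live in memory; amdgpu_fence_activity() further down reconstructs the full 64-bit sequence.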
100 int amdgpu_fence_emit(struct amdgpu_ring *ring, void *owner, in amdgpu_fence_emit() argument
103 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_emit()
110 (*fence)->seq = ++ring->fence_drv.sync_seq[ring->idx]; in amdgpu_fence_emit()
111 (*fence)->ring = ring; in amdgpu_fence_emit()
114 &ring->fence_drv.fence_queue.lock, in amdgpu_fence_emit()
115 adev->fence_context + ring->idx, in amdgpu_fence_emit()
117 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
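Fragments 103-117 cover the core of amdgpu_fence_emit(): allocate a fence, stamp it with the next per-ring sequence number, initialize the base fence object against the ring's wait-queue lock, and ask the ring to write that number to the fence address once the preceding commands retire. A sketch with the unmatched lines filled in; the slab allocation and the AMDGPU_FENCE_FLAG_INT flag are assumptions based on the kernel of this period:

int amdgpu_fence_emit(struct amdgpu_ring *ring, void *owner,
		      struct amdgpu_fence **fence)
{
	struct amdgpu_device *adev = ring->adev;

	*fence = kmem_cache_alloc(amdgpu_fence_slab, GFP_KERNEL);
	if ((*fence) == NULL)
		return -ENOMEM;

	/* sync_seq[] tracks the newest sequence number emitted on each ring */
	(*fence)->seq = ++ring->fence_drv.sync_seq[ring->idx];
	(*fence)->ring = ring;
	(*fence)->owner = owner;
	fence_init(&(*fence)->base, &amdgpu_fence_ops,
		   &ring->fence_drv.fence_queue.lock,
		   adev->fence_context + ring->idx,
		   (*fence)->seq);
	/* the ring writes seq to gpu_addr and raises an interrupt when done */
	amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,
			       (*fence)->seq,
			       AMDGPU_FENCE_FLAG_INT);
	return 0;
}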
130 static void amdgpu_fence_schedule_fallback(struct amdgpu_ring *ring) in amdgpu_fence_schedule_fallback() argument
132 mod_timer(&ring->fence_drv.fallback_timer, in amdgpu_fence_schedule_fallback()
145 static bool amdgpu_fence_activity(struct amdgpu_ring *ring) in amdgpu_fence_activity() argument
172 last_seq = atomic64_read(&ring->fence_drv.last_seq); in amdgpu_fence_activity()
174 last_emitted = ring->fence_drv.sync_seq[ring->idx]; in amdgpu_fence_activity()
175 seq = amdgpu_fence_read(ring); in amdgpu_fence_activity()
199 } while (atomic64_xchg(&ring->fence_drv.last_seq, seq) > seq); in amdgpu_fence_activity()
202 amdgpu_fence_schedule_fallback(ring); in amdgpu_fence_activity()
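amdgpu_fence_activity() (fragments 172-202) is where the 32-bit value from amdgpu_fence_read() is widened back to 64 bits: the upper half is borrowed from last_seq, or from last_emitted when the comparison shows the hardware counter wrapped. The atomic64_xchg() loop retries if another CPU advanced last_seq concurrently. A reconstruction around the matched lines; the comments and the loop bound are paraphrased from the source of this era:

static bool amdgpu_fence_activity(struct amdgpu_ring *ring)
{
	uint64_t seq, last_seq, last_emitted;
	unsigned count_loop = 0;
	bool wake = false;

	last_seq = atomic64_read(&ring->fence_drv.last_seq);
	do {
		last_emitted = ring->fence_drv.sync_seq[ring->idx];
		seq = amdgpu_fence_read(ring);
		seq |= last_seq & 0xffffffff00000000LL;
		if (seq < last_seq) {
			/* the 32-bit counter wrapped; take the upper
			 * half from the newest emitted sequence */
			seq &= 0xffffffff;
			seq |= last_emitted & 0xffffffff00000000LL;
		}

		if (seq <= last_seq || seq > last_emitted)
			break;

		wake = true;
		last_seq = seq;
		if ((count_loop++) > 10)
			break;
	} while (atomic64_xchg(&ring->fence_drv.last_seq, seq) > seq);

	/* fences are still outstanding: keep the fallback timer armed */
	if (seq < last_emitted)
		amdgpu_fence_schedule_fallback(ring);

	return wake;
}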
216 void amdgpu_fence_process(struct amdgpu_ring *ring) in amdgpu_fence_process() argument
218 if (amdgpu_fence_activity(ring)) in amdgpu_fence_process()
219 wake_up_all(&ring->fence_drv.fence_queue); in amdgpu_fence_process()
231 struct amdgpu_ring *ring = (void *)arg; in amdgpu_fence_fallback() local
233 amdgpu_fence_process(ring); in amdgpu_fence_fallback()
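Together with amdgpu_fence_schedule_fallback() above (line 132's mod_timer continues with jiffies plus a timeout constant), these fragments form the interrupt-or-timer processing path: the fence interrupt handler calls amdgpu_fence_process(), and the fallback timer re-runs it in case an interrupt was missed. A sketch, assuming AMDGPU_FENCE_JIFFIES_TIMEOUT as the timeout constant:

static void amdgpu_fence_schedule_fallback(struct amdgpu_ring *ring)
{
	mod_timer(&ring->fence_drv.fallback_timer,
		  jiffies + AMDGPU_FENCE_JIFFIES_TIMEOUT);
}

void amdgpu_fence_process(struct amdgpu_ring *ring)
{
	if (amdgpu_fence_activity(ring))
		wake_up_all(&ring->fence_drv.fence_queue);
}

static void amdgpu_fence_fallback(unsigned long arg)
{
	struct amdgpu_ring *ring = (void *)arg;

	amdgpu_fence_process(ring);
}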
249 static bool amdgpu_fence_seq_signaled(struct amdgpu_ring *ring, u64 seq) in amdgpu_fence_seq_signaled() argument
251 if (atomic64_read(&ring->fence_drv.last_seq) >= seq) in amdgpu_fence_seq_signaled()
255 amdgpu_fence_process(ring); in amdgpu_fence_seq_signaled()
256 if (atomic64_read(&ring->fence_drv.last_seq) >= seq) in amdgpu_fence_seq_signaled()
272 static int amdgpu_fence_ring_wait_seq(struct amdgpu_ring *ring, uint64_t seq) in amdgpu_fence_ring_wait_seq() argument
276 BUG_ON(!ring); in amdgpu_fence_ring_wait_seq()
277 if (seq > ring->fence_drv.sync_seq[ring->idx]) in amdgpu_fence_ring_wait_seq()
280 if (atomic64_read(&ring->fence_drv.last_seq) >= seq) in amdgpu_fence_ring_wait_seq()
283 amdgpu_fence_schedule_fallback(ring); in amdgpu_fence_ring_wait_seq()
284 wait_event(ring->fence_drv.fence_queue, ( in amdgpu_fence_ring_wait_seq()
285 (signaled = amdgpu_fence_seq_signaled(ring, seq)))); in amdgpu_fence_ring_wait_seq()
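amdgpu_fence_seq_signaled() and amdgpu_fence_ring_wait_seq() (fragments 249-285) implement the blocking wait: check last_seq, poll once via amdgpu_fence_process(), and otherwise sleep on fence_queue until the wanted sequence is reached, with the fallback timer armed first. Nearly every line here is covered by the listing; only the return values are filled in from the contemporaneous source:

static bool amdgpu_fence_seq_signaled(struct amdgpu_ring *ring, u64 seq)
{
	if (atomic64_read(&ring->fence_drv.last_seq) >= seq)
		return true;

	/* poll the new last sequence at least once */
	amdgpu_fence_process(ring);
	if (atomic64_read(&ring->fence_drv.last_seq) >= seq)
		return true;

	return false;
}

static int amdgpu_fence_ring_wait_seq(struct amdgpu_ring *ring, uint64_t seq)
{
	bool signaled = false;

	BUG_ON(!ring);
	if (seq > ring->fence_drv.sync_seq[ring->idx])
		return -EINVAL;	/* cannot wait for a fence never emitted */

	if (atomic64_read(&ring->fence_drv.last_seq) >= seq)
		return 0;

	amdgpu_fence_schedule_fallback(ring);
	wait_event(ring->fence_drv.fence_queue, (
		   (signaled = amdgpu_fence_seq_signaled(ring, seq))));

	return signaled ? 0 : -EDEADLK;
}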
303 int amdgpu_fence_wait_next(struct amdgpu_ring *ring) in amdgpu_fence_wait_next() argument
305 uint64_t seq = atomic64_read(&ring->fence_drv.last_seq) + 1ULL; in amdgpu_fence_wait_next()
307 if (seq >= ring->fence_drv.sync_seq[ring->idx]) in amdgpu_fence_wait_next()
310 return amdgpu_fence_ring_wait_seq(ring, seq); in amdgpu_fence_wait_next()
323 int amdgpu_fence_wait_empty(struct amdgpu_ring *ring) in amdgpu_fence_wait_empty() argument
325 uint64_t seq = ring->fence_drv.sync_seq[ring->idx]; in amdgpu_fence_wait_empty()
330 return amdgpu_fence_ring_wait_seq(ring, seq); in amdgpu_fence_wait_empty()
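The two public waiters reduce to amdgpu_fence_ring_wait_seq(): wait_next targets last_seq + 1 (the oldest unsignaled fence), wait_empty targets sync_seq (the newest emitted one). The fragments give all but the early-return values; -ENOENT for an idle ring is taken from the source of this era:

int amdgpu_fence_wait_next(struct amdgpu_ring *ring)
{
	uint64_t seq = atomic64_read(&ring->fence_drv.last_seq) + 1ULL;

	/* nothing is outstanding beyond what already signaled */
	if (seq >= ring->fence_drv.sync_seq[ring->idx])
		return -ENOENT;

	return amdgpu_fence_ring_wait_seq(ring, seq);
}

int amdgpu_fence_wait_empty(struct amdgpu_ring *ring)
{
	uint64_t seq = ring->fence_drv.sync_seq[ring->idx];

	if (!seq)	/* no fence was ever emitted on this ring */
		return 0;

	return amdgpu_fence_ring_wait_seq(ring, seq);
}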
342 unsigned amdgpu_fence_count_emitted(struct amdgpu_ring *ring) in amdgpu_fence_count_emitted() argument
349 amdgpu_fence_process(ring); in amdgpu_fence_count_emitted()
350 emitted = ring->fence_drv.sync_seq[ring->idx] in amdgpu_fence_count_emitted()
351 - atomic64_read(&ring->fence_drv.last_seq); in amdgpu_fence_count_emitted()
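amdgpu_fence_count_emitted() is the difference between the newest emitted and newest signaled sequence, taken without the ring lock, so a slightly stale count is acceptable. The clamp below is an assumption carried over from the source of this vintage, guarding the narrowing conversion:

unsigned amdgpu_fence_count_emitted(struct amdgpu_ring *ring)
{
	uint64_t emitted;

	/* not protected by the ring lock: a slightly stale count is fine */
	amdgpu_fence_process(ring);
	emitted = ring->fence_drv.sync_seq[ring->idx]
		- atomic64_read(&ring->fence_drv.last_seq);
	/* clamp before narrowing the 64-bit difference to unsigned */
	if (emitted > 0x10000000)
		emitted = 0x10000000;

	return (unsigned)emitted;
}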
378 if (fence->ring == dst_ring) in amdgpu_fence_need_sync()
383 if (fence->seq <= fdrv->sync_seq[fence->ring->idx]) in amdgpu_fence_need_sync()
407 if (fence->ring == dst_ring) in amdgpu_fence_note_sync()
411 src = &fence->ring->fence_drv; in amdgpu_fence_note_sync()
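The need_sync/note_sync pair (fragments 378-411) drives inter-ring synchronization through the per-ring sync_seq[] matrix: each ring records, per source ring, the newest sequence it has already synchronized against, so redundant semaphore waits can be skipped. A sketch with the unmatched guard and bookkeeping lines filled in as an approximation:

bool amdgpu_fence_need_sync(struct amdgpu_fence *fence,
			    struct amdgpu_ring *dst_ring)
{
	struct amdgpu_fence_driver *fdrv;

	if (!fence)
		return false;

	/* a ring never needs to wait on its own fences */
	if (fence->ring == dst_ring)
		return false;

	/* already synchronized past this sequence? */
	fdrv = &dst_ring->fence_drv;
	if (fence->seq <= fdrv->sync_seq[fence->ring->idx])
		return false;

	return true;
}

void amdgpu_fence_note_sync(struct amdgpu_fence *fence,
			    struct amdgpu_ring *dst_ring)
{
	struct amdgpu_fence_driver *dst, *src;
	unsigned i;

	if (!fence || fence->ring == dst_ring)
		return;

	/* waiting on this fence implies everything it synchronized to */
	src = &fence->ring->fence_drv;
	dst = &dst_ring->fence_drv;
	for (i = 0; i < AMDGPU_MAX_RINGS; i++) {
		if (i == dst_ring->idx)
			continue;

		dst->sync_seq[i] = max(dst->sync_seq[i], src->sync_seq[i]);
	}
}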
434 int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring, in amdgpu_fence_driver_start_ring() argument
438 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_driver_start_ring()
441 if (ring != &adev->uvd.ring) { in amdgpu_fence_driver_start_ring()
442 ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs]; in amdgpu_fence_driver_start_ring()
443 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring()
447 ring->fence_drv.cpu_addr = adev->uvd.cpu_addr + index; in amdgpu_fence_driver_start_ring()
448 ring->fence_drv.gpu_addr = adev->uvd.gpu_addr + index; in amdgpu_fence_driver_start_ring()
450 amdgpu_fence_write(ring, atomic64_read(&ring->fence_drv.last_seq)); in amdgpu_fence_driver_start_ring()
453 ring->fence_drv.irq_src = irq_src; in amdgpu_fence_driver_start_ring()
454 ring->fence_drv.irq_type = irq_type; in amdgpu_fence_driver_start_ring()
455 ring->fence_drv.initialized = true; in amdgpu_fence_driver_start_ring()
458 "cpu addr 0x%p\n", ring->idx, in amdgpu_fence_driver_start_ring()
459 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr); in amdgpu_fence_driver_start_ring()
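amdgpu_fence_driver_start_ring() (fragments 434-459) binds a fence slot to the ring: an entry in the device writeback page for most rings, or a spot behind the UVD firmware image for the UVD ring, whose fences the firmware writes itself. The ALIGN() of the firmware size and the amdgpu_irq_get() call are assumptions filled in from the source of this era:

int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring,
				   struct amdgpu_irq_src *irq_src,
				   unsigned irq_type)
{
	struct amdgpu_device *adev = ring->adev;
	uint64_t index;

	if (ring != &adev->uvd.ring) {
		/* one 32-bit slot in the device writeback page */
		ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs];
		ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4);
	} else {
		/* UVD fences live directly behind the firmware image */
		index = ALIGN(adev->uvd.fw->size, 8);
		ring->fence_drv.cpu_addr = adev->uvd.cpu_addr + index;
		ring->fence_drv.gpu_addr = adev->uvd.gpu_addr + index;
	}
	/* re-publish the last signaled value into the new slot */
	amdgpu_fence_write(ring, atomic64_read(&ring->fence_drv.last_seq));
	amdgpu_irq_get(adev, irq_src, irq_type);

	ring->fence_drv.irq_src = irq_src;
	ring->fence_drv.irq_type = irq_type;
	ring->fence_drv.initialized = true;

	dev_info(adev->dev, "fence driver on ring %d use gpu addr 0x%016llx, "
		 "cpu addr 0x%p\n", ring->idx,
		 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr);
	return 0;
}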
472 int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring) in amdgpu_fence_driver_init_ring() argument
476 ring->fence_drv.cpu_addr = NULL; in amdgpu_fence_driver_init_ring()
477 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
479 ring->fence_drv.sync_seq[i] = 0; in amdgpu_fence_driver_init_ring()
481 atomic64_set(&ring->fence_drv.last_seq, 0); in amdgpu_fence_driver_init_ring()
482 ring->fence_drv.initialized = false; in amdgpu_fence_driver_init_ring()
484 setup_timer(&ring->fence_drv.fallback_timer, amdgpu_fence_fallback, in amdgpu_fence_driver_init_ring()
485 (unsigned long)ring); in amdgpu_fence_driver_init_ring()
487 init_waitqueue_head(&ring->fence_drv.fence_queue); in amdgpu_fence_driver_init_ring()
501 r = amd_sched_init(&ring->sched, &amdgpu_sched_ops, in amdgpu_fence_driver_init_ring()
503 timeout, ring->name); in amdgpu_fence_driver_init_ring()
506 ring->name); in amdgpu_fence_driver_init_ring()
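amdgpu_fence_driver_init_ring() (fragments 472-506) zeroes the bookkeeping, arms the fallback timer callback via setup_timer() (the pre-timer_setup() API), and creates the GPU scheduler instance for the ring. The timeout derivation and the error path are assumptions; amdgpu_sched_hw_submission is the module parameter the scheduler of this era took as its queue depth:

int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring)
{
	long timeout;
	int r, i;

	ring->fence_drv.cpu_addr = NULL;
	ring->fence_drv.gpu_addr = 0;
	for (i = 0; i < AMDGPU_MAX_RINGS; ++i)
		ring->fence_drv.sync_seq[i] = 0;

	atomic64_set(&ring->fence_drv.last_seq, 0);
	ring->fence_drv.initialized = false;

	setup_timer(&ring->fence_drv.fallback_timer, amdgpu_fence_fallback,
		    (unsigned long)ring);

	init_waitqueue_head(&ring->fence_drv.fence_queue);

	/* 0 from the lockup-timeout parameter means no timeout at all */
	timeout = msecs_to_jiffies(amdgpu_lockup_timeout);
	if (timeout == 0)
		timeout = MAX_SCHEDULE_TIMEOUT;

	r = amd_sched_init(&ring->sched, &amdgpu_sched_ops,
			   amdgpu_sched_hw_submission,
			   timeout, ring->name);
	if (r) {
		DRM_ERROR("Failed to create scheduler on ring %s.\n",
			  ring->name);
		return r;
	}

	return 0;
}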
557 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_fini() local
559 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_fini()
561 r = amdgpu_fence_wait_empty(ring); in amdgpu_fence_driver_fini()
566 wake_up_all(&ring->fence_drv.fence_queue); in amdgpu_fence_driver_fini()
567 amdgpu_irq_put(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_fini()
568 ring->fence_drv.irq_type); in amdgpu_fence_driver_fini()
569 amd_sched_fini(&ring->sched); in amdgpu_fence_driver_fini()
570 del_timer_sync(&ring->fence_drv.fallback_timer); in amdgpu_fence_driver_fini()
571 ring->fence_drv.initialized = false; in amdgpu_fence_driver_fini()
590 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_suspend() local
591 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_suspend()
595 r = amdgpu_fence_wait_empty(ring); in amdgpu_fence_driver_suspend()
602 amdgpu_irq_put(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_suspend()
603 ring->fence_drv.irq_type); in amdgpu_fence_driver_suspend()
626 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_resume() local
627 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_resume()
631 amdgpu_irq_get(adev, ring->fence_drv.irq_src, in amdgpu_fence_driver_resume()
632 ring->fence_drv.irq_type); in amdgpu_fence_driver_resume()
650 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_fence_driver_force_completion() local
651 if (!ring || !ring->fence_drv.initialized) in amdgpu_fence_driver_force_completion()
654 amdgpu_fence_write(ring, ring->fence_drv.sync_seq[i]); in amdgpu_fence_driver_force_completion()
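amdgpu_fence_driver_force_completion() is the GPU-reset hammer: writing the newest emitted sequence into each ring's fence slot makes every in-flight fence read back as signaled on the next amdgpu_fence_process() pass. A sketch, with the loop bound assumed to be AMDGPU_MAX_RINGS:

void amdgpu_fence_driver_force_completion(struct amdgpu_device *adev)
{
	int i;

	for (i = 0; i < AMDGPU_MAX_RINGS; i++) {
		struct amdgpu_ring *ring = adev->rings[i];
		if (!ring || !ring->fence_drv.initialized)
			continue;

		/* pretend the newest emitted fence already signaled */
		amdgpu_fence_write(ring, ring->fence_drv.sync_seq[i]);
	}
}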
670 return (const char *)fence->ring->name; in amdgpu_fence_get_timeline_name()
684 struct amdgpu_ring *ring = fence->ring; in amdgpu_fence_is_signaled() local
686 if (atomic64_read(&ring->fence_drv.last_seq) >= fence->seq) in amdgpu_fence_is_signaled()
689 amdgpu_fence_process(ring); in amdgpu_fence_is_signaled()
691 if (atomic64_read(&ring->fence_drv.last_seq) >= fence->seq) in amdgpu_fence_is_signaled()
712 adev = fence->ring->adev; in amdgpu_fence_check_signaled()
718 seq = atomic64_read(&fence->ring->fence_drv.last_seq); in amdgpu_fence_check_signaled()
726 __remove_wait_queue(&fence->ring->fence_drv.fence_queue, &fence->fence_wake); in amdgpu_fence_check_signaled()
744 struct amdgpu_ring *ring = fence->ring; in amdgpu_fence_enable_signaling() local
746 if (atomic64_read(&ring->fence_drv.last_seq) >= fence->seq) in amdgpu_fence_enable_signaling()
752 __add_wait_queue(&ring->fence_drv.fence_queue, &fence->fence_wake); in amdgpu_fence_enable_signaling()
754 if (!timer_pending(&ring->fence_drv.fallback_timer)) in amdgpu_fence_enable_signaling()
755 amdgpu_fence_schedule_fallback(ring); in amdgpu_fence_enable_signaling()
756 FENCE_TRACE(&fence->base, "armed on ring %i!\n", ring->idx); in amdgpu_fence_enable_signaling()
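amdgpu_fence_enable_signaling() (fragments 744-756) is the fence_ops hook: instead of arming a per-fence interrupt it parks a custom wait-queue entry on the ring's fence_queue, so the wake_up_all() in amdgpu_fence_process() runs amdgpu_fence_check_signaled() (fragments 712-726) for each armed fence; that callback compares last_seq against fence->seq and removes itself from the queue once signaled. A sketch of the hook, with the wait-entry setup and the reference grab filled in as assumptions:

static bool amdgpu_fence_enable_signaling(struct fence *f)
{
	struct amdgpu_fence *fence = to_amdgpu_fence(f);
	struct amdgpu_ring *ring = fence->ring;

	if (atomic64_read(&ring->fence_drv.last_seq) >= fence->seq)
		return false;	/* already signaled, nothing to arm */

	/* park a custom callback on the ring's wait queue */
	fence->fence_wake.flags = 0;
	fence->fence_wake.private = NULL;
	fence->fence_wake.func = amdgpu_fence_check_signaled;
	__add_wait_queue(&ring->fence_drv.fence_queue, &fence->fence_wake);
	fence_get(f);	/* the queue entry holds a reference */

	if (!timer_pending(&ring->fence_drv.fallback_timer))
		amdgpu_fence_schedule_fallback(ring);

	FENCE_TRACE(&fence->base, "armed on ring %i!\n", ring->idx);
	return true;
}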
787 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_debugfs_fence_info() local
788 if (!ring || !ring->fence_drv.initialized) in amdgpu_debugfs_fence_info()
791 amdgpu_fence_process(ring); in amdgpu_debugfs_fence_info()
793 seq_printf(m, "--- ring %d (%s) ---\n", i, ring->name); in amdgpu_debugfs_fence_info()
795 (unsigned long long)atomic64_read(&ring->fence_drv.last_seq)); in amdgpu_debugfs_fence_info()
797 ring->fence_drv.sync_seq[i]); in amdgpu_debugfs_fence_info()
802 ring->fence_drv.sync_seq[j]) in amdgpu_debugfs_fence_info()
804 j, ring->fence_drv.sync_seq[j]); in amdgpu_debugfs_fence_info()