References to s_fence in the kernel DRM tree:

drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c:1052: struct drm_sched_fence *s_fence;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c:1055: s_fence = to_drm_sched_fence(fence);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c:1056: fence = dma_fence_get(&s_fence->scheduled);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c:1310: p->fence = dma_fence_get(&job->base.s_fence->finished);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c:3861: if (job && job->base.s_fence->parent &&
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c:3862: dma_fence_is_signaled(job->base.s_fence->parent))
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c:146: fence_ctx = job->base.s_fence ?
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c:147: job->base.s_fence->scheduled.context : 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c:39: if (amdgpu_ring_soft_recovery(ring, job->vmid, s_job->s_fence->parent)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c:112: f = job->base.s_fence ? &job->base.s_fence->finished : job->fence;
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c:157: *f = dma_fence_get(&job->base.s_fence->finished);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c:205: &job->base.s_fence->finished,
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c:224: finished = &job->base.s_fence->finished;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:67: struct drm_sched_fence *s_fence = to_drm_sched_fence(f);
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:69: if (s_fence) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:72: ring = container_of(s_fence->sched, struct amdgpu_ring, sched);
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:88: struct drm_sched_fence *s_fence;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:94: s_fence = to_drm_sched_fence(f);
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:95: if (s_fence)
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:96: return s_fence->owner;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:267: struct drm_sched_fence *s_fence = to_drm_sched_fence(f);
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:275: if (ring && s_fence) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:279: if (s_fence->sched == &ring->sched) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:280: if (dma_fence_is_signaled(&s_fence->scheduled))
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c:283: return &s_fence->scheduled;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h:36: job->base.s_fence->finished.ops->get_timeline_name(&job->base.s_fence->finished)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h:180: __entry->context = job->base.s_fence->finished.context;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h:181: __entry->seqno = job->base.s_fence->finished.seqno;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h:205: __entry->context = job->base.s_fence->finished.context;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h:206: __entry->seqno = job->base.s_fence->finished.seqno;
drivers/gpu/drm/etnaviv/etnaviv_sched.c:77: if (likely(!sched_job->s_fence->finished.error))
drivers/gpu/drm/etnaviv/etnaviv_sched.c:160: submit->out_fence = dma_fence_get(&submit->sched_job.s_fence->finished);
drivers/gpu/drm/lima/lima_sched.c:176: struct dma_fence *fence = dma_fence_get(&task->base.s_fence->finished);
drivers/gpu/drm/lima/lima_sched.c:203: if (job->s_fence->finished.error < 0)
drivers/gpu/drm/panfrost/panfrost_job.c:233: job->render_done_fence = dma_fence_get(&job->base.s_fence->finished);
drivers/gpu/drm/panfrost/panfrost_job.c:346: if (unlikely(job->base.s_fence->finished.error))
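Across amdgpu, etnaviv, lima and panfrost (and v3d further down), the driver-side hits follow one pattern: after drm_sched_job_init() has created the job's s_fence, the driver takes a reference on &s_fence->finished as its out-fence before pushing the job, and its run_job callback bails out early when finished.error is already set. Below is a minimal sketch of that pattern, assuming a hypothetical foo driver: everything named foo_* is invented for illustration, and the drm_sched_job_init()/drm_sched_entity_push_job() signatures are the two-argument-plus-owner forms used by the drivers in this listing (they differ in other kernel versions).

#include <drm/gpu_scheduler.h>
#include <linux/dma-fence.h>

struct foo_job {
	struct drm_sched_job base;	/* embeds base.s_fence */
	struct dma_fence *out_fence;	/* handed back to userspace */
};

/* run_job callback: skip the hardware if the job was already errored out. */
static struct dma_fence *foo_run_job(struct drm_sched_job *sched_job)
{
	if (unlikely(sched_job->s_fence->finished.error))
		return NULL;

	/* ... kick the hardware and return its completion fence ... */
	return NULL;
}

static const struct drm_sched_backend_ops foo_sched_ops = {
	.run_job = foo_run_job,
	/* .timedout_job and .free_job omitted from this sketch */
};

static int foo_submit(struct foo_job *job, struct drm_sched_entity *entity,
		      void *owner)
{
	int ret;

	/* Allocates job->base.s_fence via drm_sched_fence_create(). */
	ret = drm_sched_job_init(&job->base, entity, owner);
	if (ret)
		return ret;

	/*
	 * Take the out-fence reference *before* pushing: once pushed, the
	 * scheduler owns the job and may complete and free it at any time.
	 */
	job->out_fence = dma_fence_get(&job->base.s_fence->finished);

	drm_sched_entity_push_job(&job->base, entity);
	return 0;
}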
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h:50: __entry->fence = &sched_job->s_fence->finished;
drivers/gpu/drm/scheduler/sched_entity.c:222: drm_sched_fence_finished(job->s_fence);
drivers/gpu/drm/scheduler/sched_entity.c:223: WARN_ON(job->s_fence->parent);
drivers/gpu/drm/scheduler/sched_entity.c:241: struct drm_sched_fence *s_fence = job->s_fence;
drivers/gpu/drm/scheduler/sched_entity.c:243: drm_sched_fence_scheduled(s_fence);
drivers/gpu/drm/scheduler/sched_entity.c:244: dma_fence_set_error(&s_fence->finished, -ESRCH);
drivers/gpu/drm/scheduler/sched_entity.c:401: struct drm_sched_fence *s_fence;
drivers/gpu/drm/scheduler/sched_entity.c:414: s_fence = to_drm_sched_fence(fence);
drivers/gpu/drm/scheduler/sched_entity.c:415: if (s_fence && s_fence->sched == sched) {
drivers/gpu/drm/scheduler/sched_entity.c:421: fence = dma_fence_get(&s_fence->scheduled);
drivers/gpu/drm/scheduler/sched_entity.c:467: dma_fence_set_error(&sched_job->s_fence->finished, -ECANCELED);
drivers/gpu/drm/scheduler/sched_entity.c:470: entity->last_scheduled = dma_fence_get(&sched_job->s_fence->finished);
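The sched_entity.c hits at 222, 241-244 and 467 show what happens to jobs that never reach the hardware, because their entity is torn down or cancelled: both scheduler fences still get signaled, but finished carries an error (-ESRCH or -ECANCELED) so waiters observe a failure instead of hanging forever. A condensed sketch of that path, with a hypothetical helper name; the three calls are the ones visible in the lines above:

#include <drm/gpu_scheduler.h>
#include <linux/dma-fence.h>

static void foo_kill_job(struct drm_sched_job *job, int error)
{
	struct drm_sched_fence *s_fence = job->s_fence;

	drm_sched_fence_scheduled(s_fence);		/* pretend it was dispatched */
	dma_fence_set_error(&s_fence->finished, error);	/* e.g. -ESRCH, before signaling */
	drm_sched_fence_finished(s_fence);		/* wake up anyone waiting */
}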
drivers/gpu/drm/scheduler/sched_main.c:172: struct drm_sched_fence *s_fence;
drivers/gpu/drm/scheduler/sched_main.c:178: s_fence = to_drm_sched_fence(fence);
drivers/gpu/drm/scheduler/sched_main.c:179: if (s_fence && s_fence->sched == sched)
drivers/gpu/drm/scheduler/sched_main.c:336: if (bad->s_fence->scheduled.context ==
drivers/gpu/drm/scheduler/sched_main.c:379: if (s_job->s_fence->parent &&
drivers/gpu/drm/scheduler/sched_main.c:380: dma_fence_remove_callback(s_job->s_fence->parent,
drivers/gpu/drm/scheduler/sched_main.c:398: dma_fence_wait(&s_job->s_fence->finished, false);
drivers/gpu/drm/scheduler/sched_main.c:442: struct dma_fence *fence = s_job->s_fence->parent;
drivers/gpu/drm/scheduler/sched_main.c:485: struct drm_sched_fence *s_fence = s_job->s_fence;
drivers/gpu/drm/scheduler/sched_main.c:489: guilty_context = s_job->s_fence->scheduled.context;
drivers/gpu/drm/scheduler/sched_main.c:492: if (found_guilty && s_job->s_fence->scheduled.context == guilty_context)
drivers/gpu/drm/scheduler/sched_main.c:493: dma_fence_set_error(&s_fence->finished, -ECANCELED);
drivers/gpu/drm/scheduler/sched_main.c:495: dma_fence_put(s_job->s_fence->parent);
drivers/gpu/drm/scheduler/sched_main.c:499: s_job->s_fence->parent = NULL;
drivers/gpu/drm/scheduler/sched_main.c:500: dma_fence_set_error(&s_fence->finished, PTR_ERR(fence));
drivers/gpu/drm/scheduler/sched_main.c:502: s_job->s_fence->parent = fence;
drivers/gpu/drm/scheduler/sched_main.c:537: job->s_fence = drm_sched_fence_create(entity, owner);
drivers/gpu/drm/scheduler/sched_main.c:538: if (!job->s_fence)
drivers/gpu/drm/scheduler/sched_main.c:555: dma_fence_put(&job->s_fence->finished);
drivers/gpu/drm/scheduler/sched_main.c:556: job->s_fence = NULL;
drivers/gpu/drm/scheduler/sched_main.c:622: struct drm_sched_fence *s_fence = s_job->s_fence;
drivers/gpu/drm/scheduler/sched_main.c:623: struct drm_gpu_scheduler *sched = s_fence->sched;
drivers/gpu/drm/scheduler/sched_main.c:628: trace_drm_sched_process_job(s_fence);
drivers/gpu/drm/scheduler/sched_main.c:630: dma_fence_get(&s_fence->finished);
drivers/gpu/drm/scheduler/sched_main.c:631: drm_sched_fence_finished(s_fence);
drivers/gpu/drm/scheduler/sched_main.c:632: dma_fence_put(&s_fence->finished);
drivers/gpu/drm/scheduler/sched_main.c:660: if (job && dma_fence_is_signaled(&job->s_fence->finished)) {
drivers/gpu/drm/scheduler/sched_main.c:708: struct drm_sched_fence *s_fence;
drivers/gpu/drm/scheduler/sched_main.c:732: s_fence = sched_job->s_fence;
drivers/gpu/drm/scheduler/sched_main.c:738: drm_sched_fence_scheduled(s_fence);
drivers/gpu/drm/scheduler/sched_main.c:741: s_fence->parent = dma_fence_get(fence);
drivers/gpu/drm/scheduler/sched_main.c:752: dma_fence_set_error(&s_fence->finished, PTR_ERR(fence));
drivers/gpu/drm/v3d/v3d_gem.c:476: job->done_fence = dma_fence_get(&job->base.s_fence->finished);
drivers/gpu/drm/v3d/v3d_sched.c:96: if (unlikely(job->base.base.s_fence->finished.error))
drivers/gpu/drm/v3d/v3d_sched.c:148: if (unlikely(job->base.base.s_fence->finished.error))
include/drm/gpu_scheduler.h:186: struct drm_sched_fence *s_fence;
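The final hit is the declaration site: the s_fence member of struct drm_sched_job in include/drm/gpu_scheduler.h. Read together, the accesses above also pin down the shape of struct drm_sched_fence itself. The sketch below is reconstructed from this usage alone, so treat it as illustrative; the real definition in gpu_scheduler.h carries additional internal bookkeeping (such as a spinlock) not visible in these hits.

#include <linux/dma-fence.h>

struct drm_gpu_scheduler;

struct drm_sched_fence {
	struct dma_fence	scheduled;	/* signals when the job leaves the entity queue */
	struct dma_fence	finished;	/* signals when the hardware completed the job */
	struct dma_fence	*parent;	/* hardware fence returned by run_job() */
	struct drm_gpu_scheduler *sched;	/* scheduler this fence belongs to */
	void			*owner;		/* opaque tag, e.g. returned at amdgpu_sync.c:96 */
};

The lifecycle the listing traces runs through these three fences: scheduled is signaled once run_job() has been invoked (sched_main.c:738), the hardware fence it returns is stashed in parent (741) or, on error, propagated into finished via dma_fence_set_error() (752), and the completion path (622-632) signals finished while holding a temporary reference, presumably so the fence cannot be freed by one of its own signal-time callbacks.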