| /linux-4.4.14/include/linux/ |
| D | fence.h | 33 struct fence; 72 struct fence { struct 90 typedef void (*fence_func_t)(struct fence *fence, struct fence_cb *cb); argument 166 const char * (*get_driver_name)(struct fence *fence); 167 const char * (*get_timeline_name)(struct fence *fence); 168 bool (*enable_signaling)(struct fence *fence); 169 bool (*signaled)(struct fence *fence); 170 signed long (*wait)(struct fence *fence, bool intr, signed long timeout); 171 void (*release)(struct fence *fence); 173 int (*fill_driver_data)(struct fence *fence, void *data, int size); [all …]
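The fence_ops table indexed above is the driver-side contract for struct fence: get_driver_name, get_timeline_name, enable_signaling, and wait are the mandatory hooks. A minimal sketch follows (not from the tree; every `my_*` name is hypothetical) that fills them in, reuses fence_default_wait(), and relies on the default release path to free the fence:

```c
#include <linux/fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(my_fence_lock);

static const char *my_get_driver_name(struct fence *f)
{
	return "my_driver";
}

static const char *my_get_timeline_name(struct fence *f)
{
	return "my_timeline";
}

static bool my_enable_signaling(struct fence *f)
{
	/* arm whatever interrupt will later call fence_signal(f);
	 * returning false would mean "already signaled" */
	return true;
}

static const struct fence_ops my_fence_ops = {
	.get_driver_name   = my_get_driver_name,
	.get_timeline_name = my_get_timeline_name,
	.enable_signaling  = my_enable_signaling,
	.wait              = fence_default_wait,
};

static struct fence *my_fence_create(unsigned int context, unsigned int seqno)
{
	struct fence *f = kzalloc(sizeof(*f), GFP_KERNEL);

	if (!f)
		return NULL;
	/* one reference is handed to the caller; the completion path
	 * ends with fence_signal(f), and fence_put(f) drops the ref */
	fence_init(f, &my_fence_ops, &my_fence_lock, context, seqno);
	return f;
}
```

Contexts are normally obtained once per timeline via fence_context_alloc() rather than passed in ad hoc as the sketch does.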
|
| D | seqno-fence.h | 32 struct fence base; 50 to_seqno_fence(struct fence *fence) in to_seqno_fence() argument 52 if (fence->ops != &seqno_fence_ops) in to_seqno_fence() 54 return container_of(fence, struct seqno_fence, base); in to_seqno_fence() 95 seqno_fence_init(struct seqno_fence *fence, spinlock_t *lock, in seqno_fence_init() argument 101 BUG_ON(!fence || !sync_buf || !ops); in seqno_fence_init() 109 fence->ops = ops; in seqno_fence_init() 110 fence_init(&fence->base, &seqno_fence_ops, lock, context, seqno); in seqno_fence_init() 112 fence->sync_buf = sync_buf; in seqno_fence_init() 113 fence->seqno_ofs = seqno_ofs; in seqno_fence_init() [all …]
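seqno_fence_init() above wires a driver's fence_ops behind the shared seqno_fence_ops. A hedged sketch of a constructor follows; the argument order matches the 4.4 header as best understood, SEQNO_FENCE_WAIT_GEQUAL is assumed to be the usual wait condition, and all `my_*` names are hypothetical:

```c
#include <linux/seqno-fence.h>
#include <linux/slab.h>

static struct seqno_fence *my_seqno_fence_create(spinlock_t *lock,
						 struct dma_buf *sync_buf,
						 u32 context, u32 seqno_ofs,
						 u32 seqno,
						 const struct fence_ops *ops)
{
	struct seqno_fence *sf = kzalloc(sizeof(*sf), GFP_KERNEL);

	if (!sf)
		return NULL;
	/* the base fence gets &seqno_fence_ops; the driver ops passed
	 * here land in sf->ops and are forwarded, as seqno-fence.c shows */
	seqno_fence_init(sf, lock, sync_buf, context, seqno_ofs, seqno,
			 SEQNO_FENCE_WAIT_GEQUAL, ops);
	return sf;
}
```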
|
| D | reservation.h | 55 struct fence __rcu *shared[]; 62 struct fence __rcu *fence_excl; 63 struct reservation_object_list __rcu *fence; member 77 RCU_INIT_POINTER(obj->fence, NULL); in reservation_object_init() 87 struct fence *excl; in reservation_object_fini() 97 fobj = rcu_dereference_protected(obj->fence, 1); in reservation_object_fini() 112 return rcu_dereference_protected(obj->fence, in reservation_object_get_list() 116 static inline struct fence * 125 struct fence *fence); 128 struct fence *fence); [all …]
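Fences are published to other drivers through the reservation_object indexed here. A sketch of the usual pattern, assuming the caller already holds the object's ww_mutex (names hypothetical); the exclusive/shared split mirrors radeon_bo_fence() and amdgpu_bo_fence() further down:

```c
#include <linux/fence.h>
#include <linux/reservation.h>

/* caller holds obj->lock (the ww_mutex) */
static int my_resv_publish(struct reservation_object *obj,
			   struct fence *f, bool excl)
{
	int ret;

	if (excl) {
		/* replaces the exclusive fence and drops the shared ones */
		reservation_object_add_excl_fence(obj, f);
		return 0;
	}

	/* make room in the shared array before inserting */
	ret = reservation_object_reserve_shared(obj);
	if (ret)
		return ret;
	reservation_object_add_shared_fence(obj, f);
	return 0;
}
```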
|
| /linux-4.4.14/drivers/dma-buf/ |
| D | fence.c | 66 int fence_signal_locked(struct fence *fence) in fence_signal_locked() argument 71 if (WARN_ON(!fence)) in fence_signal_locked() 74 if (!ktime_to_ns(fence->timestamp)) { in fence_signal_locked() 75 fence->timestamp = ktime_get(); in fence_signal_locked() 79 if (test_and_set_bit(FENCE_FLAG_SIGNALED_BIT, &fence->flags)) { in fence_signal_locked() 87 trace_fence_signaled(fence); in fence_signal_locked() 89 list_for_each_entry_safe(cur, tmp, &fence->cb_list, node) { in fence_signal_locked() 91 cur->func(fence, cur); in fence_signal_locked() 107 int fence_signal(struct fence *fence) in fence_signal() argument 111 if (!fence) in fence_signal() [all …]
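On the consumer side, fence.c pairs the fence_signal() path above with fence_add_callback() and fence_wait(). A small sketch, with hypothetical `my_*` names:

```c
#include <linux/fence.h>

struct my_waiter {
	struct fence_cb cb;
	/* ... driver completion state ... */
};

static void my_fence_cb(struct fence *f, struct fence_cb *cb)
{
	struct my_waiter *w = container_of(cb, struct my_waiter, cb);

	/* invoked under the fence's lock from fence_signal(); keep it short */
	(void)w;
}

static long my_wait_or_chain(struct fence *f, struct my_waiter *w, bool block)
{
	if (block)
		return fence_wait(f, true);	/* 0 or -ERESTARTSYS */

	/* -ENOENT means the fence had already signaled */
	return fence_add_callback(f, &w->cb, my_fence_cb);
}
```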
|
| D | seqno-fence.c | 24 static const char *seqno_fence_get_driver_name(struct fence *fence) in seqno_fence_get_driver_name() argument 26 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_driver_name() 28 return seqno_fence->ops->get_driver_name(fence); in seqno_fence_get_driver_name() 31 static const char *seqno_fence_get_timeline_name(struct fence *fence) in seqno_fence_get_timeline_name() argument 33 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_timeline_name() 35 return seqno_fence->ops->get_timeline_name(fence); in seqno_fence_get_timeline_name() 38 static bool seqno_enable_signaling(struct fence *fence) in seqno_enable_signaling() argument 40 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_enable_signaling() 42 return seqno_fence->ops->enable_signaling(fence); in seqno_enable_signaling() 45 static bool seqno_signaled(struct fence *fence) in seqno_signaled() argument [all …]
|
| D | reservation.c | 86 struct fence *fence) in reservation_object_add_shared_inplace() argument 90 fence_get(fence); in reservation_object_add_shared_inplace() 96 struct fence *old_fence; in reservation_object_add_shared_inplace() 101 if (old_fence->context == fence->context) { in reservation_object_add_shared_inplace() 103 RCU_INIT_POINTER(fobj->shared[i], fence); in reservation_object_add_shared_inplace() 116 RCU_INIT_POINTER(fobj->shared[fobj->shared_count], fence); in reservation_object_add_shared_inplace() 127 struct fence *fence) in reservation_object_add_shared_replace() argument 130 struct fence *old_fence = NULL; in reservation_object_add_shared_replace() 132 fence_get(fence); in reservation_object_add_shared_replace() 135 RCU_INIT_POINTER(fobj->shared[0], fence); in reservation_object_add_shared_replace() [all …]
|
| D | Makefile | 1 obj-y := dma-buf.o fence.o reservation.o seqno-fence.o
|
| D | dma-buf.c | 124 static void dma_buf_poll_cb(struct fence *fence, struct fence_cb *cb) in dma_buf_poll_cb() argument 140 struct fence *fence_excl; in dma_buf_poll() 160 fobj = rcu_dereference(resv->fence); in dma_buf_poll() 222 struct fence *fence = rcu_dereference(fobj->shared[i]); in dma_buf_poll() local 224 if (!fence_get_rcu(fence)) { in dma_buf_poll() 235 if (!fence_add_callback(fence, &dcb->cb, in dma_buf_poll() 237 fence_put(fence); in dma_buf_poll() 241 fence_put(fence); in dma_buf_poll()
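dma_buf_poll() above walks the shared-fence array under RCU. The core of that pattern, sketched as a hypothetical helper; note the real code also revalidates against the reservation seqcount, which this sketch omits:

```c
#include <linux/fence.h>
#include <linux/rcupdate.h>
#include <linux/reservation.h>

static void my_for_each_shared(struct reservation_object *resv,
			       void (*fn)(struct fence *f))
{
	struct reservation_object_list *fobj;
	unsigned int i;

	rcu_read_lock();
	fobj = rcu_dereference(resv->fence);
	for (i = 0; fobj && i < fobj->shared_count; i++) {
		struct fence *f = rcu_dereference(fobj->shared[i]);

		/* fence_get_rcu() fails if the fence is being freed */
		if (!fence_get_rcu(f))
			continue;
		fn(f);
		fence_put(f);
	}
	rcu_read_unlock();
}
```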
|
| /linux-4.4.14/include/trace/events/ |
| D | fence.h | 2 #define TRACE_SYSTEM fence 9 struct fence; 14 TP_PROTO(struct fence *fence, struct fence *f1), 16 TP_ARGS(fence, f1), 19 __string(driver, fence->ops->get_driver_name(fence)) 20 __string(timeline, fence->ops->get_timeline_name(fence)) 31 __assign_str(driver, fence->ops->get_driver_name(fence)) 32 __assign_str(timeline, fence->ops->get_timeline_name(fence)) 33 __entry->context = fence->context; 34 __entry->seqno = fence->seqno; [all …]
|
| /linux-4.4.14/drivers/staging/android/ |
| D | sync.c | 155 struct sync_fence *fence; in sync_fence_alloc() local 157 fence = kzalloc(size, GFP_KERNEL); in sync_fence_alloc() 158 if (fence == NULL) in sync_fence_alloc() 161 fence->file = anon_inode_getfile("sync_fence", &sync_fence_fops, in sync_fence_alloc() 162 fence, 0); in sync_fence_alloc() 163 if (IS_ERR(fence->file)) in sync_fence_alloc() 166 kref_init(&fence->kref); in sync_fence_alloc() 167 strlcpy(fence->name, name, sizeof(fence->name)); in sync_fence_alloc() 169 init_waitqueue_head(&fence->wq); in sync_fence_alloc() 171 return fence; in sync_fence_alloc() [all …]
|
| D | sync_debug.c | 56 void sync_fence_debug_add(struct sync_fence *fence) in sync_fence_debug_add() argument 61 list_add_tail(&fence->sync_fence_list, &sync_fence_list_head); in sync_fence_debug_add() 65 void sync_fence_debug_remove(struct sync_fence *fence) in sync_fence_debug_remove() argument 70 list_del(&fence->sync_fence_list); in sync_fence_debug_remove() 85 static void sync_print_pt(struct seq_file *s, struct sync_pt *pt, bool fence) in sync_print_pt() argument 94 fence ? parent->name : "", in sync_print_pt() 95 fence ? "_" : "", in sync_print_pt() 111 if (fence) { in sync_print_pt() 146 static void sync_print_fence(struct seq_file *s, struct sync_fence *fence) in sync_print_fence() argument 152 seq_printf(s, "[%p] %s: %s\n", fence, fence->name, in sync_print_fence() [all …]
|
| D | sync.h | 124 struct fence base; 138 struct fence *sync_pt; 139 struct sync_fence *fence; member 170 typedef void (*sync_callback_t)(struct sync_fence *fence, 289 void sync_fence_put(struct sync_fence *fence); 299 void sync_fence_install(struct sync_fence *fence, int fd); 311 int sync_fence_wait_async(struct sync_fence *fence, 325 int sync_fence_cancel_async(struct sync_fence *fence, 336 int sync_fence_wait(struct sync_fence *fence, long timeout); 342 void sync_fence_debug_add(struct sync_fence *fence); [all …]
|
| D | sw_sync.c | 170 struct sync_fence *fence; in sw_sync_ioctl_create_fence() local 188 fence = sync_fence_create(data.name, pt); in sw_sync_ioctl_create_fence() 189 if (!fence) { in sw_sync_ioctl_create_fence() 195 data.fence = fd; in sw_sync_ioctl_create_fence() 197 sync_fence_put(fence); in sw_sync_ioctl_create_fence() 202 sync_fence_install(fence, fd); in sw_sync_ioctl_create_fence()
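sw_sync_ioctl_create_fence() above is the fd-facing half; the kernel-side half looks roughly like this sketch, assuming the staging sw_sync API (sw_sync_pt_create(), sync_pt_free()) as present in 4.4:

```c
#include "sw_sync.h"
#include "sync.h"

static struct sync_fence *my_make_fence(struct sw_sync_timeline *tl, u32 value)
{
	struct sync_pt *pt = sw_sync_pt_create(tl, value);
	struct sync_fence *fence;

	if (!pt)
		return NULL;

	fence = sync_fence_create("my_fence", pt);
	if (!fence) {
		sync_pt_free(pt);
		return NULL;
	}
	/* signals once sw_sync_timeline_inc(tl, n) advances past value */
	return fence;
}
```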
|
| /linux-4.4.14/drivers/gpu/drm/nouveau/ |
| D | nouveau_fence.c | 44 from_fence(struct fence *fence) in from_fence() argument 46 return container_of(fence, struct nouveau_fence, base); in from_fence() 50 nouveau_fctx(struct nouveau_fence *fence) in nouveau_fctx() argument 52 return container_of(fence->base.lock, struct nouveau_fence_chan, lock); in nouveau_fctx() 56 nouveau_fence_signal(struct nouveau_fence *fence) in nouveau_fence_signal() argument 60 fence_signal_locked(&fence->base); in nouveau_fence_signal() 61 list_del(&fence->head); in nouveau_fence_signal() 62 rcu_assign_pointer(fence->channel, NULL); in nouveau_fence_signal() 64 if (test_bit(FENCE_FLAG_USER_BITS, &fence->base.flags)) { in nouveau_fence_signal() 65 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal() [all …]
|
| D | nv04_fence.c | 38 nv04_fence_emit(struct nouveau_fence *fence) in nv04_fence_emit() argument 40 struct nouveau_channel *chan = fence->channel; in nv04_fence_emit() 44 OUT_RING (chan, fence->base.seqno); in nv04_fence_emit() 51 nv04_fence_sync(struct nouveau_fence *fence, in nv04_fence_sync() argument 69 struct nv04_fence_chan *fctx = chan->fence; in nv04_fence_context_del() 71 chan->fence = NULL; in nv04_fence_context_del() 84 chan->fence = fctx; in nv04_fence_context_new() 93 struct nv04_fence_priv *priv = drm->fence; in nv04_fence_destroy() 94 drm->fence = NULL; in nv04_fence_destroy() 103 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv04_fence_create()
|
| D | nv84_fence.c | 34 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_crtc() 74 nv84_fence_emit(struct nouveau_fence *fence) in nv84_fence_emit() argument 76 struct nouveau_channel *chan = fence->channel; in nv84_fence_emit() 77 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_emit() 80 if (fence->sysmem) in nv84_fence_emit() 85 return fctx->base.emit32(chan, addr, fence->base.seqno); in nv84_fence_emit() 89 nv84_fence_sync(struct nouveau_fence *fence, in nv84_fence_sync() argument 92 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_sync() 95 if (fence->sysmem) in nv84_fence_sync() 100 return fctx->base.sync32(chan, addr, fence->base.seqno); in nv84_fence_sync() [all …]
|
| D | nv10_fence.c | 30 nv10_fence_emit(struct nouveau_fence *fence) in nv10_fence_emit() argument 32 struct nouveau_channel *chan = fence->channel; in nv10_fence_emit() 36 OUT_RING (chan, fence->base.seqno); in nv10_fence_emit() 44 nv10_fence_sync(struct nouveau_fence *fence, in nv10_fence_sync() argument 59 struct nv10_fence_chan *fctx = chan->fence; in nv10_fence_context_del() 65 chan->fence = NULL; in nv10_fence_context_del() 74 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv10_fence_context_new() 88 struct nv10_fence_priv *priv = drm->fence; in nv10_fence_destroy() 93 drm->fence = NULL; in nv10_fence_destroy() 102 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv10_fence_create()
|
| D | nv17_fence.c | 33 nv17_fence_sync(struct nouveau_fence *fence, in nv17_fence_sync() argument 37 struct nv10_fence_priv *priv = chan->drm->fence; in nv17_fence_sync() 38 struct nv10_fence_chan *fctx = chan->fence; in nv17_fence_sync() 76 struct nv10_fence_priv *priv = chan->drm->fence; in nv17_fence_context_new() 83 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv17_fence_context_new() 108 struct nv10_fence_priv *priv = drm->fence; in nv17_fence_resume() 119 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv17_fence_create()
|
| D | nouveau_fence.h | 11 struct fence base; 27 void nouveau_fence_work(struct fence *, void (*)(void *), void *); 64 #define nouveau_fence(drm) ((struct nouveau_fence_priv *)(drm)->fence)
|
| D | nouveau_gem.c | 122 struct fence *fence = NULL; in nouveau_gem_object_unmap() local 131 fence = rcu_dereference_protected(fobj->shared[0], in nouveau_gem_object_unmap() 134 fence = reservation_object_get_excl(nvbo->bo.resv); in nouveau_gem_object_unmap() 136 if (fence && mapped) { in nouveau_gem_object_unmap() 137 nouveau_fence_work(fence, nouveau_gem_object_delete, vma); in nouveau_gem_object_unmap() 332 validate_fini_no_ticket(struct validate_op *op, struct nouveau_fence *fence, in validate_fini_no_ticket() argument 342 if (likely(fence)) in validate_fini_no_ticket() 343 nouveau_bo_fence(nvbo, fence, !!b->write_domains); in validate_fini_no_ticket() 358 validate_fini(struct validate_op *op, struct nouveau_fence *fence, in validate_fini() argument 361 validate_fini_no_ticket(op, fence, pbbo); in validate_fini() [all …]
|
| D | nv50_fence.c | 38 struct nv10_fence_priv *priv = chan->drm->fence; in nv50_fence_context_new() 45 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv50_fence_context_new() 90 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv50_fence_create()
|
| D | nvc0_fence.c | 68 struct nv84_fence_chan *fctx = chan->fence; in nvc0_fence_context_new() 80 struct nv84_fence_priv *priv = drm->fence; in nvc0_fence_create()
|
| D | nouveau_chan.c | 46 if (likely(chan && chan->fence)) { in nouveau_channel_idle() 48 struct nouveau_fence *fence = NULL; in nouveau_channel_idle() local 51 ret = nouveau_fence_new(chan, false, &fence); in nouveau_channel_idle() 53 ret = nouveau_fence_wait(fence, false, false); in nouveau_channel_idle() 54 nouveau_fence_unref(&fence); in nouveau_channel_idle() 71 if (chan->fence) in nouveau_channel_del()
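Assembled from the nouveau_channel_idle() fragments above (only the wrapper name is hypothetical; the rest is as in the tree): emit a fence on the channel, wait for it, drop the reference:

```c
/* in-tree context: nouveau_fence.h declares these */
static int my_channel_idle(struct nouveau_channel *chan)
{
	struct nouveau_fence *fence = NULL;
	int ret;

	ret = nouveau_fence_new(chan, false, &fence);
	if (ret)
		return ret;

	ret = nouveau_fence_wait(fence, false, false);
	nouveau_fence_unref(&fence);
	return ret;
}
```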
|
| D | nouveau_bo.c | 55 nouveau_fence_unref(®->fence); in nv10_bo_update_tile_region() 75 (!tile->fence || nouveau_fence_done(tile->fence))) in nv10_bo_get_tile_region() 86 struct fence *fence) in nv10_bo_put_tile_region() argument 92 tile->fence = (struct nouveau_fence *)fence_get(fence); in nv10_bo_put_tile_region() 1062 struct nouveau_fence *fence; in nouveau_bo_move_m2mf() local 1080 ret = nouveau_fence_new(chan, false, &fence); in nouveau_bo_move_m2mf() 1083 &fence->base, in nouveau_bo_move_m2mf() 1087 nouveau_fence_unref(&fence); in nouveau_bo_move_m2mf() 1276 struct fence *fence = reservation_object_get_excl(bo->resv); in nouveau_bo_vm_cleanup() local 1278 nv10_bo_put_tile_region(dev, *old_tile, fence); in nouveau_bo_vm_cleanup() [all …]
|
| D | nouveau_drm.h | 64 struct nouveau_fence *fence; member 143 void *fence; member
|
| D | nouveau_chan.h | 25 void *fence; member
|
| D | nouveau_display.c | 670 struct nouveau_fence_chan *fctx = chan->fence; in nouveau_page_flip_emit() 722 struct nouveau_fence *fence; in nouveau_crtc_page_flip() local 796 ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence); in nouveau_crtc_page_flip() 804 nouveau_bo_fence(old_bo, fence, false); in nouveau_crtc_page_flip() 808 nouveau_fence_unref(&fence); in nouveau_crtc_page_flip() 827 struct nouveau_fence_chan *fctx = chan->fence; in nouveau_finish_page_flip()
|
| D | nouveau_drm.c | 152 if (drm->fence) in nouveau_accel_fini() 563 if (drm->fence && nouveau_fence(drm)->suspend) { in nouveau_do_suspend() 588 if (drm->fence && nouveau_fence(drm)->resume) in nouveau_do_suspend() 609 if (drm->fence && nouveau_fence(drm)->resume) in nouveau_do_resume()
|
| /linux-4.4.14/drivers/gpu/drm/virtio/ |
| D | virtgpu_fence.c | 29 static const char *virtio_get_driver_name(struct fence *f) in virtio_get_driver_name() 34 static const char *virtio_get_timeline_name(struct fence *f) in virtio_get_timeline_name() 39 static bool virtio_enable_signaling(struct fence *f) in virtio_enable_signaling() 44 static bool virtio_signaled(struct fence *f) in virtio_signaled() 46 struct virtio_gpu_fence *fence = to_virtio_fence(f); in virtio_signaled() local 48 if (atomic64_read(&fence->drv->last_seq) >= fence->seq) in virtio_signaled() 53 static void virtio_fence_value_str(struct fence *f, char *str, int size) in virtio_fence_value_str() 55 struct virtio_gpu_fence *fence = to_virtio_fence(f); in virtio_fence_value_str() local 57 snprintf(str, size, "%llu", fence->seq); in virtio_fence_value_str() 60 static void virtio_timeline_value_str(struct fence *f, char *str, int size) in virtio_timeline_value_str() [all …]
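virtio_signaled() above reduces to a seqno comparison against the last value the device completed. Restated on a hypothetical structure:

```c
#include <linux/atomic.h>
#include <linux/fence.h>

struct my_fence {
	struct fence f;
	u64 seq;		/* seqno this fence waits for */
	atomic64_t *last_seq;	/* latest seqno the device completed */
};

static bool my_signaled(struct fence *f)
{
	struct my_fence *mf = container_of(f, struct my_fence, f);

	return atomic64_read(mf->last_seq) >= mf->seq;
}
```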
|
| D | virtgpu_ioctl.c | 99 struct virtio_gpu_fence *fence; in virtio_gpu_execbuffer() local 168 vfpriv->ctx_id, &fence); in virtio_gpu_execbuffer() 170 ttm_eu_fence_buffer_objects(&ticket, &validate_list, &fence->f); in virtio_gpu_execbuffer() 175 fence_put(&fence->f); in virtio_gpu_execbuffer() 234 struct virtio_gpu_fence *fence = NULL; in virtio_gpu_resource_create_ioctl() local 299 ret = virtio_gpu_object_attach(vgdev, qobj, res_id, &fence); in virtio_gpu_resource_create_ioctl() 304 ttm_eu_fence_buffer_objects(&ticket, &validate_list, &fence->f); in virtio_gpu_resource_create_ioctl() 315 fence_put(&fence->f); in virtio_gpu_resource_create_ioctl() 326 fence_put(&fence->f); in virtio_gpu_resource_create_ioctl() 332 fence_put(&fence->f); in virtio_gpu_resource_create_ioctl() [all …]
|
| D | virtgpu_drv.h | 83 struct fence f; 270 struct virtio_gpu_fence **fence); 282 struct virtio_gpu_fence **fence); 306 uint32_t ctx_id, struct virtio_gpu_fence **fence); 311 struct virtio_gpu_fence **fence); 316 struct virtio_gpu_fence **fence); 320 struct virtio_gpu_fence **fence); 348 struct virtio_gpu_fence **fence);
|
| D | virtgpu_vq.c | 353 struct virtio_gpu_fence **fence) in virtio_gpu_queue_fenced_ctrl_buffer() argument 375 if (fence) in virtio_gpu_queue_fenced_ctrl_buffer() 376 virtio_gpu_fence_emit(vgdev, hdr, fence); in virtio_gpu_queue_fenced_ctrl_buffer() 518 struct virtio_gpu_fence **fence) in virtio_gpu_cmd_transfer_to_host_2d() argument 534 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_to_host_2d() 542 struct virtio_gpu_fence **fence) in virtio_gpu_cmd_resource_attach_backing() argument 557 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_attach_backing() 798 struct virtio_gpu_fence **fence) in virtio_gpu_cmd_resource_create_3d() argument 810 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_create_3d() 817 struct virtio_gpu_fence **fence) in virtio_gpu_cmd_transfer_to_host_3d() argument [all …]
|
| D | virtgpu_display.c | 69 struct virtio_gpu_fence *fence = NULL; in virtio_gpu_crtc_cursor_set() local 92 0, 0, &fence); in virtio_gpu_crtc_cursor_set() 96 &fence->f); in virtio_gpu_crtc_cursor_set() 97 fence_put(&fence->f); in virtio_gpu_crtc_cursor_set()
|
| /linux-4.4.14/drivers/gpu/drm/amd/scheduler/ |
| D | sched_fence.c | 32 struct amd_sched_fence *fence = NULL; in amd_sched_fence_create() local 35 fence = kmem_cache_zalloc(sched_fence_slab, GFP_KERNEL); in amd_sched_fence_create() 36 if (fence == NULL) in amd_sched_fence_create() 39 INIT_LIST_HEAD(&fence->scheduled_cb); in amd_sched_fence_create() 40 fence->owner = owner; in amd_sched_fence_create() 41 fence->sched = s_entity->sched; in amd_sched_fence_create() 42 spin_lock_init(&fence->lock); in amd_sched_fence_create() 45 fence_init(&fence->base, &amd_sched_fence_ops, &fence->lock, in amd_sched_fence_create() 48 return fence; in amd_sched_fence_create() 51 void amd_sched_fence_signal(struct amd_sched_fence *fence) in amd_sched_fence_signal() argument [all …]
|
| D | gpu_sched_trace.h | 20 __field(struct fence *, fence) 29 __entry->fence = &sched_job->s_fence->base; 37 __entry->entity, __entry->sched_job, __entry->fence, __entry->name, 42 TP_PROTO(struct amd_sched_fence *fence), 43 TP_ARGS(fence), 45 __field(struct fence *, fence) 49 __entry->fence = &fence->base; 51 TP_printk("fence=%p signaled", __entry->fence)
|
| D | gpu_scheduler.h | 55 struct fence *dependency; 71 struct fence base; 88 static inline struct amd_sched_fence *to_amd_sched_fence(struct fence *f) in to_amd_sched_fence() 103 struct fence *(*dependency)(struct amd_sched_job *sched_job); 104 struct fence *(*run_job)(struct amd_sched_job *sched_job); 140 void amd_sched_fence_scheduled(struct amd_sched_fence *fence); 141 void amd_sched_fence_signal(struct amd_sched_fence *fence);
|
| D | gpu_scheduler.c | 222 static void amd_sched_entity_wakeup(struct fence *f, struct fence_cb *cb) in amd_sched_entity_wakeup() 234 struct fence * fence = entity->dependency; in amd_sched_entity_add_dependency_cb() local 237 if (fence->context == entity->fence_context) { in amd_sched_entity_add_dependency_cb() 243 s_fence = to_amd_sched_fence(fence); in amd_sched_entity_add_dependency_cb() 246 if (test_bit(AMD_SCHED_FENCE_SCHEDULED_BIT, &fence->flags)) { in amd_sched_entity_add_dependency_cb() 364 static void amd_sched_process_job(struct fence *f, struct fence_cb *cb) in amd_sched_process_job() 419 struct fence *fence; in amd_sched_main() local 444 fence = sched->ops->run_job(sched_job); in amd_sched_main() 446 if (fence) { in amd_sched_main() 447 r = fence_add_callback(fence, &s_fence->cb, in amd_sched_main() [all …]
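The scheduler drives a backend through the dependency/run_job hooks listed in gpu_scheduler.h above. A sketch of a run_job() implementation; my_push_to_ring() is a hypothetical helper:

```c
#include <linux/fence.h>

struct amd_sched_job;

/* hypothetical: kicks the job to hardware, returns its HW fence */
extern struct fence *my_push_to_ring(struct amd_sched_job *sched_job);

static struct fence *my_run_job(struct amd_sched_job *sched_job)
{
	struct fence *hw_fence = my_push_to_ring(sched_job);

	/* amd_sched_main() will fence_add_callback() on this fence and
	 * signal the job's s_fence from amd_sched_process_job() */
	return hw_fence;
}
```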
|
| /linux-4.4.14/drivers/gpu/drm/radeon/ |
| D | radeon_fence.c | 130 struct radeon_fence **fence, in radeon_fence_emit() argument 136 *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); in radeon_fence_emit() 137 if ((*fence) == NULL) { in radeon_fence_emit() 140 (*fence)->rdev = rdev; in radeon_fence_emit() 141 (*fence)->seq = seq; in radeon_fence_emit() 142 (*fence)->ring = ring; in radeon_fence_emit() 143 (*fence)->is_vm_update = false; in radeon_fence_emit() 144 fence_init(&(*fence)->base, &radeon_fence_ops, in radeon_fence_emit() 146 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit() 147 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit() [all …]
|
| D | radeon_sync.c | 64 struct radeon_fence *fence) in radeon_sync_fence() argument 68 if (!fence) in radeon_sync_fence() 71 other = sync->sync_to[fence->ring]; in radeon_sync_fence() 72 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence() 74 if (fence->is_vm_update) { in radeon_sync_fence() 76 sync->last_vm_update = radeon_fence_later(fence, other); in radeon_sync_fence() 95 struct fence *f; in radeon_sync_resv() 96 struct radeon_fence *fence; in radeon_sync_resv() local 102 fence = f ? to_radeon_fence(f) : NULL; in radeon_sync_resv() 103 if (fence && fence->rdev == rdev) in radeon_sync_resv() [all …]
|
| D | radeon_sa.c | 150 radeon_fence_unref(&sa_bo->fence); in radeon_sa_bo_remove_locked() 163 if (sa_bo->fence == NULL || !radeon_fence_signaled(sa_bo->fence)) { in radeon_sa_bo_try_free() 277 if (!radeon_fence_signaled(sa_bo->fence)) { in radeon_sa_bo_next_hole() 278 fences[i] = sa_bo->fence; in radeon_sa_bo_next_hole() 301 ++tries[best_bo->fence->ring]; in radeon_sa_bo_next_hole() 329 (*sa_bo)->fence = NULL; in radeon_sa_bo_new() 377 struct radeon_fence *fence) in radeon_sa_bo_free() argument 387 if (fence && !radeon_fence_signaled(fence)) { in radeon_sa_bo_free() 388 (*sa_bo)->fence = radeon_fence_ref(fence); in radeon_sa_bo_free() 390 &sa_manager->flist[fence->ring]); in radeon_sa_bo_free() [all …]
|
| D | radeon_test.c | 87 struct radeon_fence *fence = NULL; in radeon_do_test_moves() local 120 fence = radeon_copy_dma(rdev, gtt_addr, vram_addr, in radeon_do_test_moves() 124 fence = radeon_copy_blit(rdev, gtt_addr, vram_addr, in radeon_do_test_moves() 127 if (IS_ERR(fence)) { in radeon_do_test_moves() 129 r = PTR_ERR(fence); in radeon_do_test_moves() 133 r = radeon_fence_wait(fence, false); in radeon_do_test_moves() 139 radeon_fence_unref(&fence); in radeon_do_test_moves() 171 fence = radeon_copy_dma(rdev, vram_addr, gtt_addr, in radeon_do_test_moves() 175 fence = radeon_copy_blit(rdev, vram_addr, gtt_addr, in radeon_do_test_moves() 178 if (IS_ERR(fence)) { in radeon_do_test_moves() [all …]
|
| D | evergreen_dma.c | 42 struct radeon_fence *fence) in evergreen_dma_fence_ring_emit() argument 44 struct radeon_ring *ring = &rdev->ring[fence->ring]; in evergreen_dma_fence_ring_emit() 45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit() 50 radeon_ring_write(ring, fence->seq); in evergreen_dma_fence_ring_emit() 113 struct radeon_fence *fence; in evergreen_copy_dma() local 149 r = radeon_fence_emit(rdev, &fence, ring->idx); in evergreen_copy_dma() 157 radeon_sync_free(rdev, &sync, fence); in evergreen_copy_dma() 159 return fence; in evergreen_copy_dma()
|
| D | radeon_ib.c | 70 ib->fence = NULL; in radeon_ib_get() 96 radeon_sync_free(rdev, &ib->sync, ib->fence); in radeon_ib_free() 97 radeon_sa_bo_free(rdev, &ib->sa_bo, ib->fence); in radeon_ib_free() 98 radeon_fence_unref(&ib->fence); in radeon_ib_free() 165 r = radeon_fence_emit(rdev, &ib->fence, ib->ring); in radeon_ib_schedule() 172 const_ib->fence = radeon_fence_ref(ib->fence); in radeon_ib_schedule() 176 radeon_vm_fence(rdev, ib->vm, ib->fence); in radeon_ib_schedule()
|
| D | radeon_benchmark.c | 42 struct radeon_fence *fence = NULL; in radeon_benchmark_do_move() local 49 fence = radeon_copy_dma(rdev, saddr, daddr, in radeon_benchmark_do_move() 54 fence = radeon_copy_blit(rdev, saddr, daddr, in radeon_benchmark_do_move() 62 if (IS_ERR(fence)) in radeon_benchmark_do_move() 63 return PTR_ERR(fence); in radeon_benchmark_do_move() 65 r = radeon_fence_wait(fence, false); in radeon_benchmark_do_move() 66 radeon_fence_unref(&fence); in radeon_benchmark_do_move()
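The copy-then-wait idiom shared by radeon_benchmark_do_move() and radeon_do_test_moves(), assembled into one helper (wrapper name hypothetical, calls as indexed):

```c
/* in-tree context: radeon.h declares these */
static int my_copy_and_wait(struct radeon_device *rdev, u64 saddr, u64 daddr,
			    unsigned int num_gpu_pages,
			    struct reservation_object *resv)
{
	struct radeon_fence *fence;
	int r;

	fence = radeon_copy_dma(rdev, saddr, daddr, num_gpu_pages, resv);
	if (IS_ERR(fence))
		return PTR_ERR(fence);

	r = radeon_fence_wait(fence, false);
	radeon_fence_unref(&fence);
	return r;
}
```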
|
| D | rv770_dma.c | 47 struct radeon_fence *fence; in rv770_copy_dma() local 83 r = radeon_fence_emit(rdev, &fence, ring->idx); in rv770_copy_dma() 91 radeon_sync_free(rdev, &sync, fence); in rv770_copy_dma() 93 return fence; in rv770_copy_dma()
|
| D | r600_dma.c | 288 struct radeon_fence *fence) in r600_dma_fence_ring_emit() argument 290 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r600_dma_fence_ring_emit() 291 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit() 297 radeon_ring_write(ring, lower_32_bits(fence->seq)); in r600_dma_fence_ring_emit() 371 r = radeon_fence_wait(ib.fence, false); in r600_dma_ib_test() 383 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); in r600_dma_ib_test() 444 struct radeon_fence *fence; in r600_copy_dma() local 480 r = radeon_fence_emit(rdev, &fence, ring->idx); in r600_copy_dma() 488 radeon_sync_free(rdev, &sync, fence); in r600_copy_dma() 490 return fence; in r600_copy_dma()
|
| D | radeon_vm.c | 196 struct radeon_fence *fence = rdev->vm_manager.active[i]; in radeon_vm_grab_id() local 198 if (fence == NULL) { in radeon_vm_grab_id() 205 if (radeon_fence_is_earlier(fence, best[fence->ring])) { in radeon_vm_grab_id() 206 best[fence->ring] = fence; in radeon_vm_grab_id() 207 choices[fence->ring == ring ? 0 : 1] = i; in radeon_vm_grab_id() 270 struct radeon_fence *fence) in radeon_vm_fence() argument 272 unsigned vm_id = vm->ids[fence->ring].id; in radeon_vm_fence() 275 rdev->vm_manager.active[vm_id] = radeon_fence_ref(fence); in radeon_vm_fence() 277 radeon_fence_unref(&vm->ids[fence->ring].last_id_use); in radeon_vm_fence() 278 vm->ids[fence->ring].last_id_use = radeon_fence_ref(fence); in radeon_vm_fence() [all …]
|
| D | radeon_vce.c | 347 uint32_t handle, struct radeon_fence **fence) in radeon_vce_get_create_msg() argument 395 if (fence) in radeon_vce_get_create_msg() 396 *fence = radeon_fence_ref(ib.fence); in radeon_vce_get_create_msg() 414 uint32_t handle, struct radeon_fence **fence) in radeon_vce_get_destroy_msg() argument 452 if (fence) in radeon_vce_get_destroy_msg() 453 *fence = radeon_fence_ref(ib.fence); in radeon_vce_get_destroy_msg() 736 struct radeon_fence *fence) in radeon_vce_fence_emit() argument 738 struct radeon_ring *ring = &rdev->ring[fence->ring]; in radeon_vce_fence_emit() 739 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in radeon_vce_fence_emit() 744 radeon_ring_write(ring, cpu_to_le32(fence->seq)); in radeon_vce_fence_emit() [all …]
|
| D | radeon_uvd.c | 215 struct radeon_fence *fence; in radeon_uvd_suspend() local 220 R600_RING_TYPE_UVD_INDEX, handle, &fence); in radeon_uvd_suspend() 226 radeon_fence_wait(fence, false); in radeon_uvd_suspend() 227 radeon_fence_unref(&fence); in radeon_uvd_suspend() 290 struct radeon_fence *fence; in radeon_uvd_free_handles() local 295 R600_RING_TYPE_UVD_INDEX, handle, &fence); in radeon_uvd_free_handles() 301 radeon_fence_wait(fence, false); in radeon_uvd_free_handles() 302 radeon_fence_unref(&fence); in radeon_uvd_free_handles() 426 struct fence *f; in radeon_uvd_cs_msg() 697 struct radeon_fence **fence) in radeon_uvd_send_msg() argument [all …]
|
| D | uvd_v1_0.c | 82 struct radeon_fence *fence) in uvd_v1_0_fence_emit() argument 84 struct radeon_ring *ring = &rdev->ring[fence->ring]; in uvd_v1_0_fence_emit() 85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit() 90 radeon_ring_write(ring, fence->seq); in uvd_v1_0_fence_emit() 501 struct radeon_fence *fence = NULL; in uvd_v1_0_ib_test() local 519 r = radeon_uvd_get_destroy_msg(rdev, ring->idx, 1, &fence); in uvd_v1_0_ib_test() 525 r = radeon_fence_wait(fence, false); in uvd_v1_0_ib_test() 532 radeon_fence_unref(&fence); in uvd_v1_0_ib_test()
|
| D | uvd_v2_2.c | 40 struct radeon_fence *fence) in uvd_v2_2_fence_emit() argument 42 struct radeon_ring *ring = &rdev->ring[fence->ring]; in uvd_v2_2_fence_emit() 43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit() 46 radeon_ring_write(ring, fence->seq); in uvd_v2_2_fence_emit()
|
| D | cik_sdma.c | 201 struct radeon_fence *fence) in cik_sdma_fence_ring_emit() argument 203 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_sdma_fence_ring_emit() 204 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit() 210 radeon_ring_write(ring, fence->seq); in cik_sdma_fence_ring_emit() 214 cik_sdma_hdp_flush_ring_emit(rdev, fence->ring); in cik_sdma_fence_ring_emit() 584 struct radeon_fence *fence; in cik_copy_dma() local 622 r = radeon_fence_emit(rdev, &fence, ring->idx); in cik_copy_dma() 630 radeon_sync_free(rdev, &sync, fence); in cik_copy_dma() 632 return fence; in cik_copy_dma() 740 r = radeon_fence_wait(ib.fence, false); in cik_sdma_ib_test() [all …]
|
| D | si_dma.c | 236 struct radeon_fence *fence; in si_copy_dma() local 272 r = radeon_fence_emit(rdev, &fence, ring->idx); in si_copy_dma() 280 radeon_sync_free(rdev, &sync, fence); in si_copy_dma() 282 return fence; in si_copy_dma()
|
| D | radeon_semaphore.c | 94 struct radeon_fence *fence) in radeon_semaphore_free() argument 103 radeon_sa_bo_free(rdev, &(*semaphore)->sa_bo, fence); in radeon_semaphore_free()
|
| D | radeon.h | 367 struct fence base; 382 int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring); 384 bool radeon_fence_signaled(struct radeon_fence *fence); 385 int radeon_fence_wait(struct radeon_fence *fence, bool interruptible); 391 struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence); 392 void radeon_fence_unref(struct radeon_fence **fence); 394 bool radeon_fence_need_sync(struct radeon_fence *fence, int ring); 395 void radeon_fence_note_sync(struct radeon_fence *fence, int ring); 561 struct radeon_fence *fence; member 603 struct radeon_fence *fence); [all …]
|
| D | radeon_object.h | 156 extern void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, 189 struct radeon_fence *fence);
|
| D | radeon_asic.h | 77 struct radeon_fence *fence); 173 struct radeon_fence *fence); 323 struct radeon_fence *fence); 329 struct radeon_fence *fence); 543 struct radeon_fence *fence); 603 struct radeon_fence *fence); 707 struct radeon_fence *fence); 789 struct radeon_fence *fence); 807 struct radeon_fence *fence); 809 struct radeon_fence *fence); [all …]
|
| D | radeon_display.c | 411 if (work->fence) { in radeon_flip_work_func() 412 struct radeon_fence *fence; in radeon_flip_work_func() local 414 fence = to_radeon_fence(work->fence); in radeon_flip_work_func() 415 if (fence && fence->rdev == rdev) { in radeon_flip_work_func() 416 r = radeon_fence_wait(fence, false); in radeon_flip_work_func() 425 r = fence_wait(work->fence, false); in radeon_flip_work_func() 435 fence_put(work->fence); in radeon_flip_work_func() 436 work->fence = NULL; in radeon_flip_work_func() 559 work->fence = fence_get(reservation_object_get_excl(new_rbo->tbo.resv)); in radeon_crtc_page_flip() 641 fence_put(work->fence); in radeon_crtc_page_flip()
|
| D | r200.c | 90 struct radeon_fence *fence; in r200_copy_dma() local 122 r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); in r200_copy_dma() 128 return fence; in r200_copy_dma()
|
| D | radeon_ttm.c | 260 struct radeon_fence *fence; in radeon_move_blit() local 299 fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->resv); in radeon_move_blit() 300 if (IS_ERR(fence)) in radeon_move_blit() 301 return PTR_ERR(fence); in radeon_move_blit() 303 r = ttm_bo_move_accel_cleanup(bo, &fence->base, in radeon_move_blit() 305 radeon_fence_unref(&fence); in radeon_move_blit()
|
| D | radeon_object.c | 850 void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, in radeon_bo_fence() argument 856 reservation_object_add_shared_fence(resv, &fence->base); in radeon_bo_fence() 858 reservation_object_add_excl_fence(resv, &fence->base); in radeon_bo_fence()
|
| D | r300.c | 209 struct radeon_fence *fence) in r300_fence_ring_emit() argument 211 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r300_fence_ring_emit() 236 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); in r300_fence_ring_emit() 237 radeon_ring_write(ring, fence->seq); in r300_fence_ring_emit()
|
| D | r600.c | 2865 struct radeon_fence *fence) in r600_fence_ring_emit() argument 2867 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r600_fence_ring_emit() 2875 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_fence_ring_emit() 2887 radeon_ring_write(ring, fence->seq); in r600_fence_ring_emit() 2904 …radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET… in r600_fence_ring_emit() 2905 radeon_ring_write(ring, fence->seq); in r600_fence_ring_emit() 2966 struct radeon_fence *fence; in r600_copy_cpdma() local 3012 r = radeon_fence_emit(rdev, &fence, ring->idx); in r600_copy_cpdma() 3020 radeon_sync_free(rdev, &sync, fence); in r600_copy_cpdma() 3022 return fence; in r600_copy_cpdma() [all …]
|
| D | ni.c | 1409 struct radeon_fence *fence) in cayman_fence_ring_emit() argument 1411 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cayman_fence_ring_emit() 1412 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cayman_fence_ring_emit() 1427 radeon_ring_write(ring, fence->seq); in cayman_fence_ring_emit()
|
| D | cik.c | 3963 struct radeon_fence *fence) in cik_fence_gfx_ring_emit() argument 3965 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_fence_gfx_ring_emit() 3966 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_fence_gfx_ring_emit() 3979 radeon_ring_write(ring, fence->seq - 1); in cik_fence_gfx_ring_emit() 3990 radeon_ring_write(ring, fence->seq); in cik_fence_gfx_ring_emit() 4004 struct radeon_fence *fence) in cik_fence_compute_ring_emit() argument 4006 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cik_fence_compute_ring_emit() 4007 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_fence_compute_ring_emit() 4018 radeon_ring_write(ring, fence->seq); in cik_fence_compute_ring_emit() 4072 struct radeon_fence *fence; in cik_copy_cpdma() local [all …]
|
| D | r100.c | 851 struct radeon_fence *fence) in r100_fence_ring_emit() argument 853 struct radeon_ring *ring = &rdev->ring[fence->ring]; in r100_fence_ring_emit() 866 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); in r100_fence_ring_emit() 867 radeon_ring_write(ring, fence->seq); in r100_fence_ring_emit() 889 struct radeon_fence *fence; in r100_copy_blit() local 950 r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); in r100_copy_blit() 956 return fence; in r100_copy_blit() 3734 r = radeon_fence_wait(ib.fence, false); in r100_ib_test()
|
| D | radeon_cs.c | 415 &parser->ib.fence->base); in radeon_cs_parser_fini()
|
| D | si.c | 3372 struct radeon_fence *fence) in si_fence_ring_emit() argument 3374 struct radeon_ring *ring = &rdev->ring[fence->ring]; in si_fence_ring_emit() 3375 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in si_fence_ring_emit() 3394 radeon_ring_write(ring, fence->seq); in si_fence_ring_emit()
|
| /linux-4.4.14/drivers/gpu/drm/vmwgfx/ |
| D | vmwgfx_fence.c | 54 struct vmw_fence_obj fence; member 77 struct vmw_fence_obj *fence; member 85 fman_from_fence(struct vmw_fence_obj *fence) in fman_from_fence() argument 87 return container_of(fence->base.lock, struct vmw_fence_manager, lock); in fman_from_fence() 112 static void vmw_fence_obj_destroy(struct fence *f) in vmw_fence_obj_destroy() 114 struct vmw_fence_obj *fence = in vmw_fence_obj_destroy() local 117 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_obj_destroy() 121 list_del_init(&fence->head); in vmw_fence_obj_destroy() 124 fence->destroy(fence); in vmw_fence_obj_destroy() 127 static const char *vmw_fence_get_driver_name(struct fence *f) in vmw_fence_get_driver_name() [all …]
|
| D | vmwgfx_fence.h | 55 struct fence base; 59 void (*destroy)(struct vmw_fence_obj *fence); 70 struct vmw_fence_obj *fence = *fence_p; in vmw_fence_obj_unreference() local 73 if (fence) in vmw_fence_obj_unreference() 74 fence_put(&fence->base); in vmw_fence_obj_unreference() 78 vmw_fence_obj_reference(struct vmw_fence_obj *fence) in vmw_fence_obj_reference() argument 80 if (fence) in vmw_fence_obj_reference() 81 fence_get(&fence->base); in vmw_fence_obj_reference() 82 return fence; in vmw_fence_obj_reference() 87 extern bool vmw_fence_obj_signaled(struct vmw_fence_obj *fence); [all …]
|
| D | vmwgfx_cotable.c | 313 struct vmw_fence_obj *fence; in vmw_cotable_unbind() local 325 (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); in vmw_cotable_unbind() 326 vmw_fence_single_bo(bo, fence); in vmw_cotable_unbind() 327 if (likely(fence != NULL)) in vmw_cotable_unbind() 328 vmw_fence_obj_unreference(&fence); in vmw_cotable_unbind() 350 struct vmw_fence_obj *fence; in vmw_cotable_readback() local 368 (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); in vmw_cotable_readback() 369 vmw_fence_single_bo(&res->backup->base, fence); in vmw_cotable_readback() 370 vmw_fence_obj_unreference(&fence); in vmw_cotable_readback()
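The unbind/readback paths in this directory repeat one idiom: emit a fence over the FIFO, attach it to the buffer, drop the local reference. Collected into a single hypothetical helper, with calls exactly as indexed:

```c
/* in-tree context: vmwgfx_drv.h declares these */
static void my_fence_bo(struct vmw_private *dev_priv,
			struct ttm_buffer_object *bo)
{
	struct vmw_fence_obj *fence;

	(void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL);
	vmw_fence_single_bo(bo, fence);
	if (likely(fence != NULL))
		vmw_fence_obj_unreference(&fence);
}
```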
|
| D | vmwgfx_context.c | 372 struct vmw_fence_obj *fence; in vmw_gb_context_unbind() local 424 &fence, NULL); in vmw_gb_context_unbind() 426 vmw_fence_single_bo(bo, fence); in vmw_gb_context_unbind() 428 if (likely(fence != NULL)) in vmw_gb_context_unbind() 429 vmw_fence_obj_unreference(&fence); in vmw_gb_context_unbind() 589 struct vmw_fence_obj *fence; in vmw_dx_context_unbind() local 648 &fence, NULL); in vmw_dx_context_unbind() 650 vmw_fence_single_bo(bo, fence); in vmw_dx_context_unbind() 652 if (likely(fence != NULL)) in vmw_dx_context_unbind() 653 vmw_fence_obj_unreference(&fence); in vmw_dx_context_unbind()
|
| D | vmwgfx_shader.c | 283 struct vmw_fence_obj *fence; in vmw_gb_shader_unbind() local 306 &fence, NULL); in vmw_gb_shader_unbind() 308 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_shader_unbind() 310 if (likely(fence != NULL)) in vmw_gb_shader_unbind() 311 vmw_fence_obj_unreference(&fence); in vmw_gb_shader_unbind() 525 struct vmw_fence_obj *fence; in vmw_dx_shader_unbind() local 538 &fence, NULL); in vmw_dx_shader_unbind() 539 vmw_fence_single_bo(val_buf->bo, fence); in vmw_dx_shader_unbind() 541 if (likely(fence != NULL)) in vmw_dx_shader_unbind() 542 vmw_fence_obj_unreference(&fence); in vmw_dx_shader_unbind()
|
| D | vmwgfx_reg.h | 47 u32 fence; member
|
| D | vmwgfx_resource.c | 1449 struct vmw_fence_obj *fence) in vmw_fence_single_bo() argument 1456 if (fence == NULL) { in vmw_fence_single_bo() 1457 vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); in vmw_fence_single_bo() 1458 reservation_object_add_excl_fence(bo->resv, &fence->base); in vmw_fence_single_bo() 1459 fence_put(&fence->base); in vmw_fence_single_bo() 1461 reservation_object_add_excl_fence(bo->resv, &fence->base); in vmw_fence_single_bo() 1596 struct vmw_fence_obj *fence; in vmw_query_move_notify() local 1602 (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); in vmw_query_move_notify() 1603 vmw_fence_single_bo(bo, fence); in vmw_query_move_notify() 1605 if (fence != NULL) in vmw_query_move_notify() [all …]
|
| D | vmwgfx_surface.c | 443 struct vmw_fence_obj *fence; in vmw_legacy_srf_dma() local 468 &fence, NULL); in vmw_legacy_srf_dma() 470 vmw_fence_single_bo(val_buf->bo, fence); in vmw_legacy_srf_dma() 472 if (likely(fence != NULL)) in vmw_legacy_srf_dma() 473 vmw_fence_obj_unreference(&fence); in vmw_legacy_srf_dma() 1169 struct vmw_fence_obj *fence; in vmw_gb_surface_unbind() local 1223 &fence, NULL); in vmw_gb_surface_unbind() 1225 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_surface_unbind() 1227 if (likely(fence != NULL)) in vmw_gb_surface_unbind() 1228 vmw_fence_obj_unreference(&fence); in vmw_gb_surface_unbind()
|
| D | vmwgfx_scrn.c | 472 struct vmw_fence_obj *fence = NULL; in vmw_sou_crtc_page_flip() local 493 true, &fence); in vmw_sou_crtc_page_flip() 497 0, 0, 1, 1, &fence); in vmw_sou_crtc_page_flip() 502 if (!fence) { in vmw_sou_crtc_page_flip() 510 ret = vmw_event_fence_action_queue(file_priv, fence, in vmw_sou_crtc_page_flip() 521 vmw_fence_obj_unreference(&fence); in vmw_sou_crtc_page_flip()
|
| D | vmwgfx_execbuf.c | 3692 struct vmw_fence_obj *fence, in vmw_execbuf_copy_fence_user() argument 3704 BUG_ON(fence == NULL); in vmw_execbuf_copy_fence_user() 3707 fence_rep.seqno = fence->base.seqno; in vmw_execbuf_copy_fence_user() 3728 (void) vmw_fence_obj_wait(fence, false, false, in vmw_execbuf_copy_fence_user() 3903 struct vmw_fence_obj *fence = NULL; in vmw_execbuf_process() local 4038 &fence, in vmw_execbuf_process() 4052 (void *) fence); in vmw_execbuf_process() 4056 __vmw_execbuf_release_pinned_bo(dev_priv, fence); in vmw_execbuf_process() 4060 user_fence_rep, fence, handle); in vmw_execbuf_process() 4064 *out_fence = fence; in vmw_execbuf_process() [all …]
|
| D | vmwgfx_stdu.c | 707 struct vmw_fence_obj *fence = NULL; in vmw_stdu_crtc_page_flip() local 710 vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); in vmw_stdu_crtc_page_flip() 711 if (!fence) in vmw_stdu_crtc_page_flip() 714 ret = vmw_event_fence_action_queue(file_priv, fence, in vmw_stdu_crtc_page_flip() 719 vmw_fence_obj_unreference(&fence); in vmw_stdu_crtc_page_flip()
|
| D | vmwgfx_kms.c | 1887 struct vmw_fence_obj *fence; in vmw_kms_helper_buffer_finish() local 1891 ret = vmw_execbuf_fence_commands(file_priv, dev_priv, &fence, in vmw_kms_helper_buffer_finish() 1894 vmw_fence_single_bo(&buf->base, fence); in vmw_kms_helper_buffer_finish() 1897 ret, user_fence_rep, fence, in vmw_kms_helper_buffer_finish() 1900 *out_fence = fence; in vmw_kms_helper_buffer_finish() 1902 vmw_fence_obj_unreference(&fence); in vmw_kms_helper_buffer_finish()
|
| D | vmwgfx_drv.h | 667 struct vmw_fence_obj *fence); 821 struct vmw_fence_obj *fence); 833 struct vmw_fence_obj *fence,
|
| D | vmwgfx_fifo.c | 571 cmd_fence->fence = *seqno; in vmw_fifo_send_fence()
|
| /linux-4.4.14/drivers/gpu/drm/amd/amdgpu/ |
| D | amdgpu_sync.c | 37 struct fence *fence; member 61 static bool amdgpu_sync_same_dev(struct amdgpu_device *adev, struct fence *f) in amdgpu_sync_same_dev() 79 static bool amdgpu_sync_test_owner(struct fence *f, void *owner) in amdgpu_sync_test_owner() 90 static void amdgpu_sync_keep_later(struct fence **keep, struct fence *fence) in amdgpu_sync_keep_later() argument 92 if (*keep && fence_is_later(*keep, fence)) in amdgpu_sync_keep_later() 96 *keep = fence_get(fence); in amdgpu_sync_keep_later() 107 struct fence *f) in amdgpu_sync_fence() 110 struct amdgpu_fence *fence; in amdgpu_sync_fence() local 119 fence = to_amdgpu_fence(f); in amdgpu_sync_fence() 120 if (!fence || fence->ring->adev != adev) { in amdgpu_sync_fence() [all …]
|
| D | amdgpu_fence.c | 101 struct amdgpu_fence **fence) in amdgpu_fence_emit() argument 106 *fence = kmem_cache_alloc(amdgpu_fence_slab, GFP_KERNEL); in amdgpu_fence_emit() 107 if ((*fence) == NULL) { in amdgpu_fence_emit() 110 (*fence)->seq = ++ring->fence_drv.sync_seq[ring->idx]; in amdgpu_fence_emit() 111 (*fence)->ring = ring; in amdgpu_fence_emit() 112 (*fence)->owner = owner; in amdgpu_fence_emit() 113 fence_init(&(*fence)->base, &amdgpu_fence_ops, in amdgpu_fence_emit() 116 (*fence)->seq); in amdgpu_fence_emit() 118 (*fence)->seq, in amdgpu_fence_emit() 370 bool amdgpu_fence_need_sync(struct amdgpu_fence *fence, in amdgpu_fence_need_sync() argument [all …]
|
| D | amdgpu_sched.c | 31 static struct fence *amdgpu_sched_dependency(struct amd_sched_job *sched_job) in amdgpu_sched_dependency() 37 static struct fence *amdgpu_sched_run_job(struct amd_sched_job *sched_job) in amdgpu_sched_run_job() 39 struct amdgpu_fence *fence = NULL; in amdgpu_sched_run_job() local 55 fence = job->ibs[job->num_ibs - 1].fence; in amdgpu_sched_run_job() 56 fence_get(&fence->base); in amdgpu_sched_run_job() 63 return fence ? &fence->base : NULL; in amdgpu_sched_run_job() 77 struct fence **f) in amdgpu_sched_ib_submit_kernel_helper() 104 *f = fence_get(&ibs[num_ibs - 1].fence->base); in amdgpu_sched_ib_submit_kernel_helper()
|
| D | amdgpu_sa.c | 150 fence_put(sa_bo->fence); in amdgpu_sa_bo_remove_locked() 163 if (sa_bo->fence == NULL || in amdgpu_sa_bo_try_free() 164 !fence_is_signaled(sa_bo->fence)) { in amdgpu_sa_bo_try_free() 249 struct fence **fences, in amdgpu_sa_bo_next_hole() 278 if (!fence_is_signaled(sa_bo->fence)) { in amdgpu_sa_bo_next_hole() 279 fences[i] = sa_bo->fence; in amdgpu_sa_bo_next_hole() 302 uint32_t idx = amdgpu_ring_from_fence(best_bo->fence)->idx; in amdgpu_sa_bo_next_hole() 318 struct fence *fences[AMDGPU_MAX_RINGS]; in amdgpu_sa_bo_new() 332 (*sa_bo)->fence = NULL; in amdgpu_sa_bo_new() 385 struct fence *fence) in amdgpu_sa_bo_free() argument [all …]
|
| D | amdgpu_test.c | 81 struct fence *fence = NULL; in amdgpu_do_test_moves() local 114 size, NULL, &fence); in amdgpu_do_test_moves() 121 r = fence_wait(fence, false); in amdgpu_do_test_moves() 127 fence_put(fence); in amdgpu_do_test_moves() 159 size, NULL, &fence); in amdgpu_do_test_moves() 166 r = fence_wait(fence, false); in amdgpu_do_test_moves() 172 fence_put(fence); in amdgpu_do_test_moves() 218 if (fence) in amdgpu_do_test_moves() 219 fence_put(fence); in amdgpu_do_test_moves() 243 struct fence **fence) in amdgpu_test_create_and_emit_fence() argument [all …]
|
| D | amdgpu_uvd.h | 32 struct fence **fence); 34 struct fence **fence);
|
| D | amdgpu_cs.c | 492 parser->fence); in amdgpu_cs_parser_fini() 497 fence_put(parser->fence); in amdgpu_cs_parser_fini() 538 struct fence *f; in amdgpu_bo_vm_update_pte() 762 struct fence *fence; in amdgpu_cs_dependencies() local 774 fence = amdgpu_ctx_get_fence(ctx, ring, in amdgpu_cs_dependencies() 776 if (IS_ERR(fence)) { in amdgpu_cs_dependencies() 777 r = PTR_ERR(fence); in amdgpu_cs_dependencies() 781 } else if (fence) { in amdgpu_cs_dependencies() 782 r = amdgpu_sync_fence(adev, &ib->sync, fence); in amdgpu_cs_dependencies() 783 fence_put(fence); in amdgpu_cs_dependencies() [all …]
|
| D | amdgpu_vm.c | 138 struct fence *best[AMDGPU_MAX_RINGS] = {}; in amdgpu_vm_grab_id() 162 struct fence *fence = adev->vm_manager.ids[i].active; in amdgpu_vm_grab_id() local 165 if (fence == NULL) { in amdgpu_vm_grab_id() 172 fring = amdgpu_ring_from_fence(fence); in amdgpu_vm_grab_id() 174 fence_is_later(best[fring->idx], fence)) { in amdgpu_vm_grab_id() 175 best[fring->idx] = fence; in amdgpu_vm_grab_id() 182 struct fence *fence; in amdgpu_vm_grab_id() local 184 fence = adev->vm_manager.ids[choices[i]].active; in amdgpu_vm_grab_id() 188 return amdgpu_sync_fence(ring->adev, sync, fence); in amdgpu_vm_grab_id() 210 struct fence *updates) in amdgpu_vm_flush() [all …]
|
| D | amdgpu_vce.h | 32 struct fence **fence); 34 struct fence **fence);
|
| D | amdgpu_benchmark.c | 36 struct fence *fence = NULL; in amdgpu_benchmark_do_move() local 42 r = amdgpu_copy_buffer(ring, saddr, daddr, size, NULL, &fence); in amdgpu_benchmark_do_move() 45 r = fence_wait(fence, false); in amdgpu_benchmark_do_move() 48 fence_put(fence); in amdgpu_benchmark_do_move() 54 if (fence) in amdgpu_benchmark_do_move() 55 fence_put(fence); in amdgpu_benchmark_do_move()
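The same copy-then-wait shape as radeon, now on the unified struct fence; assembled from amdgpu_benchmark_do_move() above (wrapper name hypothetical):

```c
/* in-tree context: amdgpu.h declares these */
static int my_copy_and_wait(struct amdgpu_ring *ring, u64 saddr, u64 daddr,
			    u32 size)
{
	struct fence *fence = NULL;
	int r;

	r = amdgpu_copy_buffer(ring, saddr, daddr, size, NULL, &fence);
	if (r)
		return r;

	r = fence_wait(fence, false);
	fence_put(fence);
	return r;
}
```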
|
| D | amdgpu_ctx.c | 235 struct fence *fence) in amdgpu_ctx_add_fence() argument 240 struct fence *other = NULL; in amdgpu_ctx_add_fence() 251 fence_get(fence); in amdgpu_ctx_add_fence() 254 cring->fences[idx] = fence; in amdgpu_ctx_add_fence() 263 struct fence *amdgpu_ctx_get_fence(struct amdgpu_ctx *ctx, in amdgpu_ctx_get_fence() 267 struct fence *fence; in amdgpu_ctx_get_fence() local 282 fence = fence_get(cring->fences[seq % AMDGPU_CTX_MAX_CS_PENDING]); in amdgpu_ctx_get_fence() 285 return fence; in amdgpu_ctx_get_fence()
|
| D | amdgpu_uvd.c | 243 struct fence *fence; in amdgpu_uvd_suspend() local 247 r = amdgpu_uvd_get_destroy_msg(ring, handle, &fence); in amdgpu_uvd_suspend() 253 fence_wait(fence, false); in amdgpu_uvd_suspend() 254 fence_put(fence); in amdgpu_uvd_suspend() 299 struct fence *fence; in amdgpu_uvd_free_handles() local 303 r = amdgpu_uvd_get_destroy_msg(ring, handle, &fence); in amdgpu_uvd_free_handles() 309 fence_wait(fence, false); in amdgpu_uvd_free_handles() 310 fence_put(fence); in amdgpu_uvd_free_handles() 841 struct fence **fence) in amdgpu_uvd_send_msg() argument 847 struct fence *f = NULL; in amdgpu_uvd_send_msg() [all …]
|
| D | amdgpu_ib.c | 96 amdgpu_sync_free(adev, &ib->sync, &ib->fence->base); in amdgpu_ib_free() 97 amdgpu_sa_bo_free(adev, &ib->sa_bo, &ib->fence->base); in amdgpu_ib_free() 98 if (ib->fence) in amdgpu_ib_free() 99 fence_put(&ib->fence->base); in amdgpu_ib_free() 198 r = amdgpu_fence_emit(ring, owner, &ib->fence); in amdgpu_ib_schedule() 208 &ib->fence->base); in amdgpu_ib_schedule() 219 amdgpu_vm_fence(adev, ib->vm, &ib->fence->base); in amdgpu_ib_schedule()
|
| D | amdgpu_vce.c | 369 struct fence **fence) in amdgpu_vce_get_create_msg() argument 373 struct fence *f = NULL; in amdgpu_vce_get_create_msg() 433 if (fence) in amdgpu_vce_get_create_msg() 434 *fence = fence_get(f); in amdgpu_vce_get_create_msg() 455 struct fence **fence) in amdgpu_vce_get_destroy_msg() argument 459 struct fence *f = NULL; in amdgpu_vce_get_destroy_msg() 500 if (fence) in amdgpu_vce_get_destroy_msg() 501 *fence = fence_get(f); in amdgpu_vce_get_destroy_msg() 853 struct fence *fence = NULL; in amdgpu_vce_ring_test_ib() local 866 r = amdgpu_vce_get_destroy_msg(ring, 1, &fence); in amdgpu_vce_ring_test_ib() [all …]
|
| D | amdgpu_semaphore.c | 90 struct fence *fence) in amdgpu_semaphore_free() argument 99 amdgpu_sa_bo_free(adev, &(*semaphore)->sa_bo, fence); in amdgpu_semaphore_free()
|
| D | amdgpu_trace.h | 58 __field(struct fence *, fence) 67 __entry->fence = &job->base.s_fence->base; 73 __entry->fence, __entry->ring_name, __entry->num_ibs) 83 __field(struct fence *, fence) 92 __entry->fence = &job->base.s_fence->base; 98 __entry->fence, __entry->ring_name, __entry->num_ibs)
|
| D | amdgpu.h | 412 struct fence base; 442 struct amdgpu_fence **fence); 448 bool amdgpu_fence_need_sync(struct amdgpu_fence *fence, 450 void amdgpu_fence_note_sync(struct amdgpu_fence *fence, 478 struct fence **fence); 502 struct fence *last_pt_update; 614 struct fence *fence; member 656 struct fence *fence); 663 struct fence *sync_to[AMDGPU_MAX_RINGS]; 665 struct fence *last_vm_update; [all …]
|
| D | amdgpu_object.h | 166 void amdgpu_bo_fence(struct amdgpu_bo *bo, struct fence *fence, 197 struct fence *fence);
|
| D | uvd_v6_0.c | 582 struct fence *fence = NULL; in uvd_v6_0_ring_test_ib() local 591 r = amdgpu_uvd_get_destroy_msg(ring, 1, &fence); in uvd_v6_0_ring_test_ib() 597 r = fence_wait(fence, false); in uvd_v6_0_ring_test_ib() 604 fence_put(fence); in uvd_v6_0_ring_test_ib()
|
| D | uvd_v5_0.c | 583 struct fence *fence = NULL; in uvd_v5_0_ring_test_ib() local 598 r = amdgpu_uvd_get_destroy_msg(ring, 1, &fence); in uvd_v5_0_ring_test_ib() 604 r = fence_wait(fence, false); in uvd_v5_0_ring_test_ib() 611 fence_put(fence); in uvd_v5_0_ring_test_ib()
|
| D | uvd_v4_2.c | 537 struct fence *fence = NULL; in uvd_v4_2_ring_test_ib() local 552 r = amdgpu_uvd_get_destroy_msg(ring, 1, &fence); in uvd_v4_2_ring_test_ib() 558 r = fence_wait(fence, false); in uvd_v4_2_ring_test_ib() 565 fence_put(fence); in uvd_v4_2_ring_test_ib()
|
| D | amdgpu_display.c | 39 struct fence **f) in amdgpu_flip_wait_fence() 41 struct amdgpu_fence *fence; in amdgpu_flip_wait_fence() local 47 fence = to_amdgpu_fence(*f); in amdgpu_flip_wait_fence() 48 if (fence) { in amdgpu_flip_wait_fence() 49 r = fence_wait(&fence->base, false); in amdgpu_flip_wait_fence()
|
| D | amdgpu_ttm.c | 231 struct fence *fence; in amdgpu_move_blit() local 270 bo->resv, &fence); in amdgpu_move_blit() 272 r = ttm_bo_move_accel_cleanup(bo, fence, in amdgpu_move_blit() 274 fence_put(fence); in amdgpu_move_blit() 1015 struct fence **fence) in amdgpu_copy_buffer() argument 1069 fence); in amdgpu_copy_buffer()
|
| D | amdgpu_object.c | 659 void amdgpu_bo_fence(struct amdgpu_bo *bo, struct fence *fence, in amdgpu_bo_fence() argument 665 reservation_object_add_shared_fence(resv, fence); in amdgpu_bo_fence() 667 reservation_object_add_excl_fence(resv, fence); in amdgpu_bo_fence()
|
| D | amdgpu_ring.c | 447 struct amdgpu_ring *amdgpu_ring_from_fence(struct fence *f) in amdgpu_ring_from_fence()
|
| D | cik_sdma.c | 631 struct fence *f = NULL; in cik_sdma_ring_test_ib()
|
| D | sdma_v2_4.c | 685 struct fence *f = NULL; in sdma_v2_4_ring_test_ib()
|
| D | sdma_v3_0.c | 835 struct fence *f = NULL; in sdma_v3_0_ring_test_ib()
|
| D | gfx_v8_0.c | 668 struct fence *f = NULL; in gfx_v8_0_ring_test_ib()
|
| D | gfx_v7_0.c | 2651 struct fence *f = NULL; in gfx_v7_0_ring_test_ib()
|
| /linux-4.4.14/drivers/gpu/drm/msm/ |
| D | msm_gem.h | 79 uint32_t fence = 0; in msm_gem_fence() local 82 fence = msm_obj->write_fence; in msm_gem_fence() 84 fence = max(fence, msm_obj->read_fence); in msm_gem_fence() 86 return fence; in msm_gem_fence() 102 uint32_t fence; member
|
| D | msm_gpu.c | 268 static void retire_submits(struct msm_gpu *gpu, uint32_t fence); 280 uint32_t fence = gpu->funcs->last_fence(gpu); in recover_worker() local 283 retire_submits(gpu, fence + 1); in recover_worker() 310 uint32_t fence = gpu->funcs->last_fence(gpu); in hangcheck_handler() local 312 if (fence != gpu->hangcheck_fence) { in hangcheck_handler() 314 gpu->hangcheck_fence = fence; in hangcheck_handler() 315 } else if (fence < gpu->submitted_fence) { in hangcheck_handler() 317 gpu->hangcheck_fence = fence; in hangcheck_handler() 321 gpu->name, fence); in hangcheck_handler() 434 static void retire_submits(struct msm_gpu *gpu, uint32_t fence) in retire_submits() argument [all …]
|
| D | msm_drv.h | 168 uint32_t fence; member 186 int msm_wait_fence(struct drm_device *dev, uint32_t fence, 189 struct msm_fence_cb *cb, uint32_t fence); 190 void msm_update_fence(struct drm_device *dev, uint32_t fence); 224 struct msm_gpu *gpu, bool write, uint32_t fence); 310 static inline bool fence_completed(struct drm_device *dev, uint32_t fence) in fence_completed() argument 313 return priv->completed_fence >= fence; in fence_completed()
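msm at this point predates struct fence and tracks plain u32 seqnos. A sketch restating the fence_completed()/msm_gem_fence() semantics visible above, with hypothetical names standing in for the msm_* ones:

```c
#include <linux/kernel.h>

#define MY_PREP_READ  0x01
#define MY_PREP_WRITE 0x02

/* a fence "completes" once the GPU's completed counter passes it */
static bool my_fence_completed(u32 completed_fence, u32 fence)
{
	return completed_fence >= fence;
}

/* which seqno an operation must wait for: readers wait on the last
 * writer; writers also wait on the last reader */
static u32 my_gem_fence(u32 read_fence, u32 write_fence, u32 op)
{
	u32 fence = 0;

	if (op & MY_PREP_READ)
		fence = write_fence;
	if (op & MY_PREP_WRITE)
		fence = max(fence, read_fence);
	return fence;
}
```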
|
| D | msm_atomic.c | 25 uint32_t fence; member 166 c->fence = max(c->fence, msm_gem_fence(to_msm_bo(obj), MSM_PREP_READ)); in add_fb() 280 msm_queue_fence_cb(dev, &c->fence_cb, c->fence); in msm_atomic_commit() 287 msm_wait_fence(dev, c->fence, &timeout, false); in msm_atomic_commit()
|
| D | msm_drv.c | 706 int msm_wait_fence(struct drm_device *dev, uint32_t fence, in msm_wait_fence() argument 715 if (fence > priv->gpu->submitted_fence) { in msm_wait_fence() 717 fence, priv->gpu->submitted_fence); in msm_wait_fence() 723 ret = fence_completed(dev, fence) ? 0 : -EBUSY; in msm_wait_fence() 738 fence_completed(dev, fence), in msm_wait_fence() 742 fence_completed(dev, fence), in msm_wait_fence() 747 fence, priv->completed_fence); in msm_wait_fence() 758 struct msm_fence_cb *cb, uint32_t fence) in msm_queue_fence_cb() argument 766 } else if (fence > priv->completed_fence) { in msm_queue_fence_cb() 767 cb->fence = fence; in msm_queue_fence_cb() [all …]
|
| D | msm_gem.c | 418 uint32_t fence = msm_gem_fence(msm_obj, in msm_gem_queue_inactive_cb() local 420 return msm_queue_fence_cb(obj->dev, cb, fence); in msm_gem_queue_inactive_cb() 424 struct msm_gpu *gpu, bool write, uint32_t fence) in msm_gem_move_to_active() argument 429 msm_obj->write_fence = fence; in msm_gem_move_to_active() 431 msm_obj->read_fence = fence; in msm_gem_move_to_active() 458 uint32_t fence = msm_gem_fence(msm_obj, op); in msm_gem_cpu_prep() local 463 ret = msm_wait_fence(dev, fence, timeout, true); in msm_gem_cpu_prep()
|
| D | msm_gem_submit.c | 419 args->fence = submit->fence; in msm_ioctl_gem_submit()
|
| D | msm_rd.c | 299 submit->fence); in msm_rd_dump_submit()
|
| /linux-4.4.14/arch/metag/include/asm/ |
| D | bitops.h | 20 fence(); in set_bit() 33 fence(); in clear_bit() 46 fence(); in change_bit() 62 fence(); in test_and_set_bit() 82 fence(); in test_and_clear_bit() 100 fence(); in test_and_change_bit()
|
| D | spinlock_lock1.h | 25 fence(); in arch_spin_lock() 44 fence(); in arch_spin_trylock() 74 fence(); in arch_write_lock() 92 fence(); in arch_write_trylock() 138 fence(); in arch_read_lock() 153 fence(); in arch_read_unlock() 167 fence(); in arch_read_trylock()
|
| D | barrier.h | 51 #define fence() do { } while (0) macro 66 static inline void fence(void) in fence() function 73 #define smp_mb() fence() 74 #define smp_rmb() fence() 77 #define fence() do { } while (0) macro
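The barrier.h hits show fence() defined twice: as a no-op macro on configurations that don't need it, and as a real function (then aliased to smp_mb()/smp_rmb()) where Meta's write buffer can reorder writes between cores. The shape of that conditional definition, with the config guard and the barrier body reduced to illustrative stand-ins:

```c
/* Illustrative guard; the kernel's actual config symbol differs. */
#ifdef SMP_WRITE_REORDERING_POSSIBLE
static inline void fence(void)
{
	/* stand-in for the MMIO write that drains the write buffer
	 * on real Meta hardware */
	__asm__ __volatile__("" ::: "memory");
}
#define smp_mb()	fence()
#define smp_rmb()	fence()
#else
/* no reordering to defend against: fence() compiles away */
#define fence()		do { } while (0)
#endif
```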
|
| D | atomic_lock1.h | 34 fence(); in atomic_set() 46 fence(); \ 60 fence(); \ 87 fence(); in atomic_cmpxchg() 105 fence(); in __atomic_add_unless() 121 fence(); in atomic_sub_if_positive()
|
| D | cmpxchg_lock1.h | 13 fence(); in xchg_u32() 25 fence(); in xchg_u8() 41 fence(); in __cmpxchg_u32()
|
| /linux-4.4.14/drivers/staging/android/trace/ |
| D | sync.h | 36 TP_PROTO(struct sync_fence *fence, int begin), 38 TP_ARGS(fence, begin), 41 __string(name, fence->name) 47 __assign_str(name, fence->name); 48 __entry->status = atomic_read(&fence->status); 57 TP_PROTO(struct fence *pt),
|
| /linux-4.4.14/drivers/gpu/drm/qxl/ |
| D | qxl_release.c | 43 static const char *qxl_get_driver_name(struct fence *fence) in qxl_get_driver_name() argument 48 static const char *qxl_get_timeline_name(struct fence *fence) in qxl_get_timeline_name() argument 53 static bool qxl_nop_signaling(struct fence *fence) in qxl_nop_signaling() argument 59 static long qxl_fence_wait(struct fence *fence, bool intr, signed long timeout) in qxl_fence_wait() argument 67 qdev = container_of(fence->lock, struct qxl_device, release_lock); in qxl_fence_wait() 68 release = container_of(fence, struct qxl_release, base); in qxl_fence_wait() 74 if (fence_is_signaled(fence)) in qxl_fence_wait() 83 if (fence_is_signaled(fence)) in qxl_fence_wait() 87 if (fence_is_signaled(fence)) in qxl_fence_wait() 99 FENCE_WARN(fence, "failed to wait on release %d " in qxl_fence_wait() [all …]
|
| D | qxl_debugfs.c | 65 fobj = rcu_dereference(bo->tbo.resv->fence); in qxl_debugfs_buffers_info()
|
| D | qxl_drv.h | 192 struct fence base;
|
| /linux-4.4.14/drivers/misc/mic/scif/ |
| D | scif_fence.c | 74 struct scif_remote_fence_info *fence; in scif_recv_wait() local 81 fence = kmalloc(sizeof(*fence), GFP_KERNEL); in scif_recv_wait() 82 if (!fence) { in scif_recv_wait() 90 memcpy(&fence->msg, msg, sizeof(struct scifmsg)); in scif_recv_wait() 91 INIT_LIST_HEAD(&fence->list); in scif_recv_wait() 96 list_add_tail(&fence->list, &scif_info.fence); in scif_recv_wait() 363 struct scif_remote_fence_info *fence; in scif_rma_handle_remote_fences() local 369 list_for_each_safe(item, tmp, &scif_info.fence) { in scif_rma_handle_remote_fences() 370 fence = list_entry(item, struct scif_remote_fence_info, in scif_rma_handle_remote_fences() 373 list_del(&fence->list); in scif_rma_handle_remote_fences() [all …]
|
| D | scif_main.h | 120 struct list_head fence; member
|
| D | scif_main.c | 282 INIT_LIST_HEAD(&scif_info.fence); in _scif_init()
|
| /linux-4.4.14/drivers/gpu/drm/i915/ |
| D | i915_gem_fence.c | 223 struct drm_i915_fence_reg *fence) in fence_number() argument 225 return fence - dev_priv->fence_regs; in fence_number() 229 struct drm_i915_fence_reg *fence, in i915_gem_object_update_fence() argument 233 int reg = fence_number(dev_priv, fence); in i915_gem_object_update_fence() 239 fence->obj = obj; in i915_gem_object_update_fence() 240 list_move_tail(&fence->lru_list, &dev_priv->mm.fence_list); in i915_gem_object_update_fence() 243 fence->obj = NULL; in i915_gem_object_update_fence() 244 list_del_init(&fence->lru_list); in i915_gem_object_update_fence() 290 struct drm_i915_fence_reg *fence; in i915_gem_object_put_fence() local 300 fence = &dev_priv->fence_regs[obj->fence_reg]; in i915_gem_object_put_fence() [all …]
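Note that i915's "fences" here carry a different meaning again: hardware fence *registers* describing tiled GTT ranges, not synchronization fences. fence_number() is plain pointer subtraction, recovering a register's index from its address within the array; a generic sketch:

```c
#include <stddef.h>

struct fence_reg {
	void *obj;	/* object currently backed by this register, if any */
};

/* Subtracting same-array pointers yields the element index,
 * not a byte offset. */
static ptrdiff_t fence_number(const struct fence_reg *regs,
			      const struct fence_reg *reg)
{
	return reg - regs;
}
```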
|
| D | i915_gpu_error.c | 384 err_printf(m, " fence[%d] = %08llx\n", i, error->fence[i]); in i915_error_state_to_str() 796 error->fence[i] = I915_READ(FENCE_REG(i)); in i915_gem_record_fences() 799 error->fence[i] = I915_READ64(FENCE_REG_965_LO(i)); in i915_gem_record_fences() 802 error->fence[i] = I915_READ64(FENCE_REG_GEN6_LO(i)); in i915_gem_record_fences()
|
| D | i915_drv.h | 521 u64 fence[I915_MAX_NUM_FENCES]; member
|
| /linux-4.4.14/include/drm/ttm/ |
| D | ttm_execbuf_util.h | 117 struct fence *fence);
|
| D | ttm_bo_driver.h | 1017 struct fence *fence,
|
| /linux-4.4.14/fs/ocfs2/cluster/ |
| D | quorum.c | 108 int lowest_hb, lowest_reachable = 0, fence = 0; in o2quo_make_decision() local 135 fence = 1; in o2quo_make_decision() 149 fence = 1; in o2quo_make_decision() 158 fence = 1; in o2quo_make_decision() 163 if (fence) { in o2quo_make_decision()
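ocfs2's usage is yet another sense of the word: cluster fencing, where a node that loses quorum takes itself down to protect shared storage. A toy version of the majority rule o2quo_make_decision() applies (heavily simplified; per the usual convention, the half containing the lowest-numbered heartbeating node survives an even split):

```c
#include <stdbool.h>

/*
 * should_self_fence() - toy quorum decision.
 * @total_hb:          nodes currently heartbeating
 * @reachable:         of those, nodes this node can talk to (incl. itself)
 * @lowest_in_my_half: whether the lowest-numbered node is among them
 */
static bool should_self_fence(int total_hb, int reachable,
			      bool lowest_in_my_half)
{
	if (2 * reachable > total_hb)
		return false;               /* clear majority: stay up */
	if (2 * reachable == total_hb)
		return !lowest_in_my_half;  /* even split: lowest node's
					       half survives */
	return true;                        /* minority partition: fence */
}
```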
|
| /linux-4.4.14/drivers/dma/ioat/ |
| D | hw.h | 85 unsigned int fence:1; member 119 unsigned int fence:1; member 166 unsigned int fence:1; member 215 unsigned int fence:1; member
|
| D | prep.c | 154 hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in ioat_dma_prep_memcpy_lock() 239 xor->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat_prep_xor_lock() 449 pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat_prep_pq_lock() 563 pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in __ioat_prep_pq16_lock() 735 hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE); in ioat_prep_interrupt_lock()
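The ioat hits are a per-descriptor ordering bit rather than a sync object: every prep routine translates the generic DMA_PREP_FENCE submission flag into the descriptor's one-bit control field the same way, with `!!` collapsing any set bit down to 0 or 1 so it fits. A trimmed sketch (the flag value matches 4.4's dmaengine.h; the descriptor layout is reduced to the relevant fields):

```c
#include <stdint.h>

#define DMA_PREP_FENCE (1 << 5)	/* from include/linux/dmaengine.h */

struct desc_ctl {
	unsigned int int_en:1;
	unsigned int fence:1;	/* hw completes prior descriptors' writes
				   before starting this one */
	unsigned int rsvd:30;
};

static void desc_set_flags(struct desc_ctl *ctl, unsigned long flags)
{
	/* !! forces the flag into the 1-bit field without truncation */
	ctl->fence = !!(flags & DMA_PREP_FENCE);
}
```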
|
| /linux-4.4.14/drivers/gpu/drm/ttm/ |
| D | ttm_execbuf_util.c | 183 struct list_head *list, struct fence *fence) in ttm_eu_fence_buffer_objects() argument 204 reservation_object_add_shared_fence(bo->resv, fence); in ttm_eu_fence_buffer_objects() 206 reservation_object_add_excl_fence(bo->resv, fence); in ttm_eu_fence_buffer_objects()
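ttm_eu_fence_buffer_objects() shows the reservation-object convention from the headers above in action: once a job is queued, each validated buffer gets the job's fence attached either as a shared fence (read-only use) or as the exclusive fence (writes). A kernel-style sketch of that per-buffer step (locking and the list walk elided; the entry type is trimmed):

```c
struct validate_entry {
	struct ttm_buffer_object *bo;
	bool shared;	/* read-only use -> shared fence slot */
};

static void fence_one_buffer(struct validate_entry *entry,
			     struct fence *fence)
{
	struct reservation_object *resv = entry->bo->resv;

	if (entry->shared)
		reservation_object_add_shared_fence(resv, fence);
	else
		reservation_object_add_excl_fence(resv, fence);
}
```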
|
| D | ttm_bo.c | 409 struct fence *fence; in ttm_bo_flush_all_fences() local 413 fence = reservation_object_get_excl(bo->resv); in ttm_bo_flush_all_fences() 414 if (fence && !fence->ops->signaled) in ttm_bo_flush_all_fences() 415 fence_enable_sw_signaling(fence); in ttm_bo_flush_all_fences() 418 fence = rcu_dereference_protected(fobj->shared[i], in ttm_bo_flush_all_fences() 421 if (!fence->ops->signaled) in ttm_bo_flush_all_fences() 422 fence_enable_sw_signaling(fence); in ttm_bo_flush_all_fences() 1543 struct fence *excl; in ttm_bo_wait() 1561 struct fence *fence; in ttm_bo_wait() local 1562 fence = rcu_dereference_protected(fobj->shared[i], in ttm_bo_wait() [all …]
|
| D | ttm_bo_util.c | 635 struct fence *fence, in ttm_bo_move_accel_cleanup() argument 646 reservation_object_add_excl_fence(bo->resv, fence); in ttm_bo_move_accel_cleanup() 674 reservation_object_add_excl_fence(ghost_obj->resv, fence); in ttm_bo_move_accel_cleanup()
|
| /linux-4.4.14/drivers/gpu/drm/msm/adreno/ |
| D | adreno_gpu.c | 80 rbmemptr(adreno_gpu, fence)); in adreno_hw_init() 95 return adreno_gpu->memptrs->fence; in adreno_last_fence() 110 adreno_gpu->memptrs->fence = gpu->submitted_fence; in adreno_recover() 156 OUT_RING(ring, submit->fence); in adreno_submit() 172 OUT_RING(ring, rbmemptr(adreno_gpu, fence)); in adreno_submit() 173 OUT_RING(ring, submit->fence); in adreno_submit() 238 seq_printf(m, "fence: %d/%d\n", adreno_gpu->memptrs->fence, in adreno_show() 279 printk("fence: %d/%d\n", adreno_gpu->memptrs->fence, in adreno_dump_info()
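The adreno lines show how the fence seqno actually gets completed: the kernel appends ring commands telling the GPU to store the submit's seqno into a shared "memptrs" page once the submit finishes, so last_fence() is just a memory read. An illustrative pseudo-driver sketch (the ring helpers and opcode are hypothetical stand-ins for the real CP packet encoding):

```c
#include <stdint.h>

/* Shared page the GPU writes completion seqnos into. */
struct memptrs {
	volatile uint32_t fence;
};

/* Hypothetical ring helpers, standing in for the real packet format. */
struct ring;
extern void ring_emit(struct ring *ring, uint32_t dword);
#define OP_MEM_WRITE 0x3d	/* illustrative opcode */

/* Ask the GPU to store 'seqno' at the fence memptr once preceding
 * commands in the ring have finished. */
static void emit_fence_write(struct ring *ring, uint32_t memptr_iova,
			     uint32_t seqno)
{
	ring_emit(ring, OP_MEM_WRITE);
	ring_emit(ring, memptr_iova);
	ring_emit(ring, seqno);
}

/* Retire and hangcheck paths then read completion without touching
 * hardware registers at all. */
static uint32_t last_fence(const struct memptrs *m)
{
	return m->fence;
}
```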
|
| D | adreno_gpu.h | 133 volatile uint32_t fence; member
|
| /linux-4.4.14/drivers/gpu/host1x/ |
| D | cdma.c | 75 pb->fence = pb->size_bytes - 8; in host1x_pushbuffer_init() 101 WARN_ON(pos == pb->fence); in host1x_pushbuffer_push() 114 pb->fence = (pb->fence + slots * 8) & (pb->size_bytes - 1); in host1x_pushbuffer_pop() 122 return ((pb->fence - pb->pos) & (pb->size_bytes - 1)) / 8; in host1x_pushbuffer_space()
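In host1x's pushbuffer, "fence" is a ring-buffer index: the last position the CPU may safely write up to, as the cdma.h comment on the next hit says. Because the buffer size is a power of two, wraparound falls out of a single mask, exactly as in the lines above. A standalone toy that reproduces the arithmetic:

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct pushbuf {
	uint32_t pos;		/* next slot the CPU will write */
	uint32_t fence;		/* index we may write up to */
	uint32_t size_bytes;	/* power of two; slots are 8 bytes */
};

/* Free slots between pos and fence, wrapping via the power-of-two mask. */
static uint32_t pushbuf_space(const struct pushbuf *pb)
{
	return ((pb->fence - pb->pos) & (pb->size_bytes - 1)) / 8;
}

/* Hardware consumed 'slots' entries: advance the fence past them. */
static void pushbuf_pop(struct pushbuf *pb, unsigned int slots)
{
	pb->fence = (pb->fence + slots * 8) & (pb->size_bytes - 1);
}

int main(void)
{
	struct pushbuf pb = { .pos = 0, .size_bytes = 4096 };

	pb.fence = pb.size_bytes - 8;		/* empty: all but one slot */
	assert(pushbuf_space(&pb) == 511);	/* 4096/8 - 1 */

	pb.pos = 512;			/* CPU wrote 64 slots */
	pushbuf_pop(&pb, 64);		/* HW consumed 64 slots */
	printf("space: %u slots\n", pushbuf_space(&pb));	/* 511 */
	return 0;
}
```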
|
| D | cdma.h | 47 u32 fence; /* index we've written */ member
|
| /linux-4.4.14/include/uapi/drm/ |
| D | msm_drm.h | 179 __u32 fence; /* out */ member 194 __u32 fence; /* in */ member
|
| D | tegra_drm.h | 128 __u32 fence; /* Return value */ member
|
| /linux-4.4.14/Documentation/DocBook/ |
| D | device-drivers.xml.db | 413 API-fence-context-alloc 414 API-fence-signal-locked 415 API-fence-signal 416 API-fence-wait-timeout 417 API-fence-enable-sw-signaling 418 API-fence-add-callback 419 API-fence-remove-callback 420 API-fence-default-wait 421 API-fence-wait-any-timeout 422 API-fence-init [all …]
|
| D | .device-drivers.xml.cmd | 2 …dma-buf/dma-buf.c drivers/dma-buf/fence.c drivers/dma-buf/seqno-fence.c include/linux/fence.h incl…
|
| D | gpu.xml.db | 614 API-i915-gem-object-put-fence 615 API-i915-gem-object-get-fence 616 API-i915-gem-object-pin-fence 617 API-i915-gem-object-unpin-fence
|
| /linux-4.4.14/drivers/staging/android/uapi/ |
| D | sw_sync.h | 23 __s32 fence; /* fd of new fence */ member
|
| D | sync.h | 26 __s32 fence; /* fd on newly created fence */ member
|
| /linux-4.4.14/Documentation/filesystems/nfs/ |
| D | pnfs-block-server.txt | 21 If the nfsd server needs to fence a non-responding client it calls 36 echo "fencing client ${CLIENT} serial ${EVPD}" >> /var/log/pnfsd-fence.log
|
| /linux-4.4.14/drivers/gpu/drm/omapdrm/ |
| D | TODO | 13 . This can be handled by the dma-buf fence/reservation stuff when it
|
| /linux-4.4.14/arch/metag/kernel/ |
| D | head.S | 53 ! In case GCOn has just been turned on we need to fence any writes that
|
| /linux-4.4.14/drivers/gpu/drm/mga/ |
| D | mga_state.c | 1043 u32 *fence = data; in mga_set_fence() local 1057 *fence = dev_priv->next_fence_to_post; in mga_set_fence() 1073 u32 *fence = data; in mga_wait_fence() local 1082 mga_driver_fence_wait(dev, fence); in mga_wait_fence()
|
| /linux-4.4.14/drivers/infiniband/hw/mlx5/ |
| D | qp.c | 2517 static u8 get_fence(u8 fence, struct ib_send_wr *wr) in get_fence() argument 2523 if (unlikely(fence)) { in get_fence() 2527 return fence; in get_fence() 2566 int nreq, u8 fence, u8 next_fence, in finish_wqe() argument 2574 ctrl->fm_ce_se |= fence; in finish_wqe() 2608 u8 fence; in mlx5_ib_post_send() local 2620 fence = qp->fm_cache; in mlx5_ib_post_send() 2695 nreq, get_fence(fence, wr), in mlx5_ib_post_send() 2722 nreq, get_fence(fence, wr), in mlx5_ib_post_send() 2744 nreq, get_fence(fence, wr), in mlx5_ib_post_send() [all …]
|
| /linux-4.4.14/drivers/video/fbdev/intelfb/ |
| D | intelfb.h | 228 u32 fence[8]; member
|
| D | intelfbhw.c | 641 hw->fence[i] = INREG(FENCE + (i << 2)); in intelfbhw_read_hw_state() 860 hw->fence[i]); in intelfbhw_print_hw_state()
|
| /linux-4.4.14/include/drm/ |
| D | drm_crtc.h | 46 struct fence; 762 struct fence *fence; member
|
| /linux-4.4.14/drivers/gpu/drm/ |
| D | drm_atomic_helper.c | 905 if (!plane->state->fence) in wait_for_fences() 910 fence_wait(plane->state->fence, false); in wait_for_fences() 911 fence_put(plane->state->fence); in wait_for_fences() 912 plane->state->fence = NULL; in wait_for_fences()
|
| /linux-4.4.14/drivers/gpu/drm/vmwgfx/device_include/ |
| D | svga_reg.h | 1576 uint32 fence; member
|
| /linux-4.4.14/drivers/gpu/drm/tegra/ |
| D | drm.c | 416 args->fence = job->syncpt_end; in tegra_drm_submit()
|
| /linux-4.4.14/ |
| D | MAINTAINERS | 3536 F: include/linux/*fence.h
|