Identifier cross-reference: fence (occurrences grouped by file; each entry gives the source line number and context).

drivers/dma-buf/dma-buf.c
  185: static void dma_buf_poll_cb(struct dma_fence *fence, struct dma_fence_cb *cb)
  221: fobj = rcu_dereference(resv->fence);
  283: struct dma_fence *fence = rcu_dereference(fobj->shared[i]);
  285: if (!dma_fence_get_rcu(fence)) {
  296: if (!dma_fence_add_callback(fence, &dcb->cb,
  298: dma_fence_put(fence);
  302: dma_fence_put(fence);
  1161: struct dma_fence *fence;
  1196: fobj = rcu_dereference(robj->fence);
  1198: fence = rcu_dereference(robj->fence_excl);
  1204: if (fence)
  1206: fence->ops->get_driver_name(fence),
  1207: fence->ops->get_timeline_name(fence),
  1208: dma_fence_is_signaled(fence) ? "" : "un");
  1210: fence = rcu_dereference(fobj->shared[i]);
  1211: if (!dma_fence_get_rcu(fence))
  1214: fence->ops->get_driver_name(fence),
  1215: fence->ops->get_timeline_name(fence),
  1216: dma_fence_is_signaled(fence) ? "" : "un");
  1217: dma_fence_put(fence);

drivers/dma-buf/dma-fence-array.c
  18: static const char *dma_fence_array_get_driver_name(struct dma_fence *fence)
  23: static const char *dma_fence_array_get_timeline_name(struct dma_fence *fence)
  70: static bool dma_fence_array_enable_signaling(struct dma_fence *fence)
  72: struct dma_fence_array *array = to_dma_fence_array(fence);
  103: static bool dma_fence_array_signaled(struct dma_fence *fence)
  105: struct dma_fence_array *array = to_dma_fence_array(fence);
  110: static void dma_fence_array_release(struct dma_fence *fence)
  112: struct dma_fence_array *array = to_dma_fence_array(fence);
  119: dma_fence_free(fence);
  188: bool dma_fence_match_context(struct dma_fence *fence, u64 context)
  190: struct dma_fence_array *array = to_dma_fence_array(fence);
  193: if (!dma_fence_is_array(fence))
  194: return fence->context == context;

drivers/dma-buf/dma-fence-chain.c
  12: static bool dma_fence_chain_enable_signaling(struct dma_fence *fence);
  39: struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence)
  44: chain = to_dma_fence_chain(fence);
  46: dma_fence_put(fence);
  54: if (!dma_fence_is_signaled(prev_chain->fence))
  73: dma_fence_put(fence);
  111: static const char *dma_fence_chain_get_driver_name(struct dma_fence *fence)
  116: static const char *dma_fence_chain_get_timeline_name(struct dma_fence *fence)
  143: static bool dma_fence_chain_enable_signaling(struct dma_fence *fence)
  145: struct dma_fence_chain *head = to_dma_fence_chain(fence);
  148: dma_fence_chain_for_each(fence, &head->base) {
  149: struct dma_fence_chain *chain = to_dma_fence_chain(fence);
  150: struct dma_fence *f = chain ? chain->fence : fence;
  154: dma_fence_put(fence);
  163: static bool dma_fence_chain_signaled(struct dma_fence *fence)
  165: dma_fence_chain_for_each(fence, fence) {
  166: struct dma_fence_chain *chain = to_dma_fence_chain(fence);
  167: struct dma_fence *f = chain ? chain->fence : fence;
  170: dma_fence_put(fence);
  178: static void dma_fence_chain_release(struct dma_fence *fence)
  180: struct dma_fence_chain *chain = to_dma_fence_chain(fence);
  205: dma_fence_put(chain->fence);
  206: dma_fence_free(fence);
  230: struct dma_fence *fence,
  238: chain->fence = fence;

drivers/dma-buf/dma-fence.c
  67: static const char *dma_fence_stub_get_name(struct dma_fence *fence)
  129: int dma_fence_signal_locked(struct dma_fence *fence)
  134: lockdep_assert_held(fence->lock);
  137: &fence->flags)))
  141: list_replace(&fence->cb_list, &cb_list);
  143: fence->timestamp = ktime_get();
  144: set_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags);
  145: trace_dma_fence_signaled(fence);
  149: cur->func(fence, cur);
  169: int dma_fence_signal(struct dma_fence *fence)
  174: if (!fence)
  177: spin_lock_irqsave(fence->lock, flags);
  178: ret = dma_fence_signal_locked(fence);
  179: spin_unlock_irqrestore(fence->lock, flags);
  204: dma_fence_wait_timeout(struct dma_fence *fence, bool intr, signed long timeout)
  211: trace_dma_fence_wait_start(fence);
  212: if (fence->ops->wait)
  213: ret = fence->ops->wait(fence, intr, timeout);
  215: ret = dma_fence_default_wait(fence, intr, timeout);
  216: trace_dma_fence_wait_end(fence);
  230: struct dma_fence *fence =
  233: trace_dma_fence_destroy(fence);
  235: if (WARN(!list_empty(&fence->cb_list) &&
  236: !test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags),
  238: fence->ops->get_driver_name(fence),
  239: fence->ops->get_timeline_name(fence),
  240: fence->context, fence->seqno)) {
  250: spin_lock_irqsave(fence->lock, flags);
  251: fence->error = -EDEADLK;
  252: dma_fence_signal_locked(fence);
  253: spin_unlock_irqrestore(fence->lock, flags);
  256: if (fence->ops->release)
  257: fence->ops->release(fence);
  259: dma_fence_free(fence);
  270: void dma_fence_free(struct dma_fence *fence)
  272: kfree_rcu(fence, rcu);
  284: void dma_fence_enable_sw_signaling(struct dma_fence *fence)
  289: &fence->flags) &&
  290: !test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags) &&
  291: fence->ops->enable_signaling) {
  292: trace_dma_fence_enable_signal(fence);
  294: spin_lock_irqsave(fence->lock, flags);
  296: if (!fence->ops->enable_signaling(fence))
  297: dma_fence_signal_locked(fence);
  299: spin_unlock_irqrestore(fence->lock, flags);
  329: int dma_fence_add_callback(struct dma_fence *fence, struct dma_fence_cb *cb,
  336: if (WARN_ON(!fence || !func))
  339: if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) {
  344: spin_lock_irqsave(fence->lock, flags);
  347: &fence->flags);
  349: if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
  351: else if (!was_set && fence->ops->enable_signaling) {
  352: trace_dma_fence_enable_signal(fence);
  354: if (!fence->ops->enable_signaling(fence)) {
  355: dma_fence_signal_locked(fence);
  362: list_add_tail(&cb->node, &fence->cb_list);
  365: spin_unlock_irqrestore(fence->lock, flags);
  383: int dma_fence_get_status(struct dma_fence *fence)
  388: spin_lock_irqsave(fence->lock, flags);
  389: status = dma_fence_get_status_locked(fence);
  390: spin_unlock_irqrestore(fence->lock, flags);
  415: dma_fence_remove_callback(struct dma_fence *fence, struct dma_fence_cb *cb)
  420: spin_lock_irqsave(fence->lock, flags);
  426: spin_unlock_irqrestore(fence->lock, flags);
  438: dma_fence_default_wait_cb(struct dma_fence *fence, struct dma_fence_cb *cb)
  459: dma_fence_default_wait(struct dma_fence *fence, bool intr, signed long timeout)
  466: if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
  469: spin_lock_irqsave(fence->lock, flags);
  477: &fence->flags);
  479: if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
  482: if (!was_set && fence->ops->enable_signaling) {
  483: trace_dma_fence_enable_signal(fence);
  485: if (!fence->ops->enable_signaling(fence)) {
  486: dma_fence_signal_locked(fence);
  498: list_add(&cb.base.node, &fence->cb_list);
  500: while (!test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags) && ret > 0) {
  505: spin_unlock_irqrestore(fence->lock, flags);
  509: spin_lock_irqsave(fence->lock, flags);
  519: spin_unlock_irqrestore(fence->lock, flags);
  531: struct dma_fence *fence = fences[i];
  532: if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) {
  590: struct dma_fence *fence = fences[i];
  593: if (dma_fence_add_callback(fence, &cb[i].base,
  646: dma_fence_init(struct dma_fence *fence, const struct dma_fence_ops *ops,
  652: kref_init(&fence->refcount);
  653: fence->ops = ops;
  654: INIT_LIST_HEAD(&fence->cb_list);
  655: fence->lock = lock;
  656: fence->context = context;
  657: fence->seqno = seqno;
  658: fence->flags = 0UL;
  659: fence->error = 0;
  661: trace_dma_fence_init(fence);
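The dma-fence.c entries above span the whole fence lifecycle: dma_fence_init(), dma_fence_add_callback(), dma_fence_signal()/dma_fence_signal_locked(), the default wait path, and dma_fence_free(). A minimal sketch of a fence provider built on that API follows; it assumes the dma_fence_init() signature shown at line 646 (ops, lock, context, seqno), and my_fence_ops, my_fence_lock, my_fence_create() and my_fence_complete() are illustrative names, not code from the listing.

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(my_fence_lock);	/* shared irq-safe lock for our fences */

static const char *my_get_driver_name(struct dma_fence *fence)
{
	return "my_driver";
}

static const char *my_get_timeline_name(struct dma_fence *fence)
{
	return "my_timeline";
}

/*
 * .enable_signaling, .signaled and .wait are optional here; the core falls
 * back to dma_fence_default_wait() when .wait is NULL (line 215 above), and
 * the default .release is dma_fence_free(), which suits a kzalloc'd fence.
 */
static const struct dma_fence_ops my_fence_ops = {
	.get_driver_name = my_get_driver_name,
	.get_timeline_name = my_get_timeline_name,
};

static struct dma_fence *my_fence_create(u64 context, u64 seqno)
{
	struct dma_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

	if (!fence)
		return NULL;

	dma_fence_init(fence, &my_fence_ops, &my_fence_lock, context, seqno);
	return fence;	/* refcount 1, owned by the caller */
}

/* Completion path, e.g. from the driver's interrupt handler: */
static void my_fence_complete(struct dma_fence *fence)
{
	dma_fence_signal(fence);	/* wakes waiters, runs callbacks */
	dma_fence_put(fence);		/* drop the submitter's reference */
}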
drivers/dma-buf/dma-resv.c
  108: RCU_INIT_POINTER(obj->fence, NULL);
  130: fobj = rcu_dereference_protected(obj->fence, 1);
  178: struct dma_fence *fence;
  180: fence = rcu_dereference_protected(old->shared[i],
  182: if (dma_fence_is_signaled(fence))
  183: RCU_INIT_POINTER(new->shared[--k], fence);
  185: RCU_INIT_POINTER(new->shared[j++], fence);
  197: rcu_assign_pointer(obj->fence, new);
  204: struct dma_fence *fence;
  206: fence = rcu_dereference_protected(new->shared[i],
  208: dma_fence_put(fence);
  224: void dma_resv_add_shared_fence(struct dma_resv *obj, struct dma_fence *fence)
  230: dma_fence_get(fence);
  244: if (old->context == fence->context ||
  254: RCU_INIT_POINTER(fobj->shared[i], fence);
  271: void dma_resv_add_excl_fence(struct dma_resv *obj, struct dma_fence *fence)
  283: if (fence)
  284: dma_fence_get(fence);
  289: RCU_INIT_POINTER(obj->fence_excl, fence);
  320: src_list = rcu_dereference(src->fence);
  333: src_list = rcu_dereference(src->fence);
  341: struct dma_fence *fence;
  343: fence = rcu_dereference(src_list->shared[i]);
  345: &fence->flags))
  348: if (!dma_fence_get_rcu(fence)) {
  350: src_list = rcu_dereference(src->fence);
  354: if (dma_fence_is_signaled(fence)) {
  355: dma_fence_put(fence);
  359: rcu_assign_pointer(dst_list->shared[dst_list->shared_count++], fence);
  375: RCU_INIT_POINTER(dst->fence, dst_list);
  423: fobj = rcu_dereference(obj->fence);
  503: struct dma_fence *fence;
  514: fence = rcu_dereference(obj->fence_excl);
  515: if (fence && !test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) {
  516: if (!dma_fence_get_rcu(fence))
  519: if (dma_fence_is_signaled(fence)) {
  520: dma_fence_put(fence);
  521: fence = NULL;
  525: fence = NULL;
  529: struct dma_resv_list *fobj = rcu_dereference(obj->fence);
  534: for (i = 0; !fence && i < shared_count; ++i) {
  549: fence = lfence;
  555: if (fence) {
  557: dma_fence_put(fence);
  561: ret = dma_fence_wait_timeout(fence, intr, ret);
  562: dma_fence_put(fence);
  577: struct dma_fence *fence, *lfence = passed_fence;
  581: fence = dma_fence_get_rcu(lfence);
  582: if (!fence)
  585: ret = !!dma_fence_is_signaled(fence);
  586: dma_fence_put(fence);
  615: struct dma_resv_list *fobj = rcu_dereference(obj->fence);
  621: struct dma_fence *fence = rcu_dereference(fobj->shared[i]);
  623: ret = dma_resv_test_signaled_single(fence);
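dma-resv.c pairs one exclusive (write) fence with a list of shared (read) fences, as the dma_resv_add_excl_fence() and dma_resv_add_shared_fence() entries show. A hedged sketch of attaching a fence to a reservation object follows; dma_resv_lock()/dma_resv_unlock() and dma_resv_reserve_shared() are assumed from the same kernel's dma-resv API, and my_resv_attach() is an illustrative helper, not code from the listing.

#include <linux/dma-resv.h>

/* "fence" is a fence the caller holds a reference on. */
static int my_resv_attach(struct dma_resv *obj, struct dma_fence *fence,
			  bool is_write)
{
	int ret = 0;

	dma_resv_lock(obj, NULL);
	if (is_write) {
		/* Writers take the exclusive slot (see line 271 above). */
		dma_resv_add_excl_fence(obj, fence);
	} else {
		/* Readers share a list; a slot must be reserved first. */
		ret = dma_resv_reserve_shared(obj, 1);
		if (!ret)
			dma_resv_add_shared_fence(obj, fence);
	}
	dma_resv_unlock(obj);

	return ret;
}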
drivers/dma-buf/seqno-fence.c
  16: static const char *seqno_fence_get_driver_name(struct dma_fence *fence)
  18: struct seqno_fence *seqno_fence = to_seqno_fence(fence);
  20: return seqno_fence->ops->get_driver_name(fence);
  23: static const char *seqno_fence_get_timeline_name(struct dma_fence *fence)
  25: struct seqno_fence *seqno_fence = to_seqno_fence(fence);
  27: return seqno_fence->ops->get_timeline_name(fence);
  30: static bool seqno_enable_signaling(struct dma_fence *fence)
  32: struct seqno_fence *seqno_fence = to_seqno_fence(fence);
  34: return seqno_fence->ops->enable_signaling(fence);
  37: static bool seqno_signaled(struct dma_fence *fence)
  39: struct seqno_fence *seqno_fence = to_seqno_fence(fence);
  41: return seqno_fence->ops->signaled && seqno_fence->ops->signaled(fence);
  44: static void seqno_release(struct dma_fence *fence)
  46: struct seqno_fence *f = to_seqno_fence(fence);
  50: f->ops->release(fence);
  55: static signed long seqno_wait(struct dma_fence *fence, bool intr,
  58: struct seqno_fence *f = to_seqno_fence(fence);
  60: return f->ops->wait(fence, intr, timeout);

drivers/dma-buf/sw_sync.c
  52: __s32 fence; /* fd of new fence */
  64: static inline struct sync_pt *dma_fence_to_sync_pt(struct dma_fence *fence)
  66: if (fence->ops != &timeline_fence_ops)
  68: return container_of(fence, struct sync_pt, base);
  119: static const char *timeline_fence_get_driver_name(struct dma_fence *fence)
  124: static const char *timeline_fence_get_timeline_name(struct dma_fence *fence)
  126: struct sync_timeline *parent = dma_fence_parent(fence);
  131: static void timeline_fence_release(struct dma_fence *fence)
  133: struct sync_pt *pt = dma_fence_to_sync_pt(fence);
  134: struct sync_timeline *parent = dma_fence_parent(fence);
  137: spin_lock_irqsave(fence->lock, flags);
  142: spin_unlock_irqrestore(fence->lock, flags);
  145: dma_fence_free(fence);
  148: static bool timeline_fence_signaled(struct dma_fence *fence)
  150: struct sync_timeline *parent = dma_fence_parent(fence);
  152: return !__dma_fence_is_later(fence->seqno, parent->value, fence->ops);
  155: static bool timeline_fence_enable_signaling(struct dma_fence *fence)
  160: static void timeline_fence_value_str(struct dma_fence *fence,
  163: snprintf(str, size, "%lld", fence->seqno);
  166: static void timeline_fence_timeline_value_str(struct dma_fence *fence,
  169: struct sync_timeline *parent = dma_fence_parent(fence);
  357: data.fence = fd;

drivers/dma-buf/sync_debug.c
  66: struct dma_fence *fence, bool show)
  68: struct sync_timeline *parent = dma_fence_parent(fence);
  71: status = dma_fence_get_status_locked(fence);
  78: if (test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags)) {
  80: ktime_to_timespec64(fence->timestamp);
  85: if (fence->ops->timeline_value_str &&
  86: fence->ops->fence_value_str) {
  90: fence->ops->fence_value_str(fence, value, sizeof(value));
  96: fence->ops->timeline_value_str(fence, value,
  129: sync_status_str(dma_fence_get_status(sync_file->fence)));
  131: if (dma_fence_is_array(sync_file->fence)) {
  132: struct dma_fence_array *array = to_dma_fence_array(sync_file->fence);
  137: sync_print_fence(s, sync_file->fence, true);

drivers/dma-buf/sync_debug.h
  48: static inline struct sync_timeline *dma_fence_parent(struct dma_fence *fence)
  50: return container_of(fence->lock, struct sync_timeline, lock);
  69: void sync_file_debug_add(struct sync_file *fence);
  70: void sync_file_debug_remove(struct sync_file *fence);

drivers/dma-buf/sync_file.c
  64: struct sync_file *sync_file_create(struct dma_fence *fence)
  72: sync_file->fence = dma_fence_get(fence);
  105: struct dma_fence *fence;
  111: fence = dma_fence_get(sync_file->fence);
  114: return fence;
  136: struct dma_fence *fence = sync_file->fence;
  139: fence->ops->get_driver_name(fence),
  140: fence->ops->get_timeline_name(fence),
  141: fence->context,
  142: fence->seqno);
  160: sync_file->fence = fences[0];
  169: sync_file->fence = &array->base;
  178: if (dma_fence_is_array(sync_file->fence)) {
  179: struct dma_fence_array *array = to_dma_fence_array(sync_file->fence);
  186: return &sync_file->fence;
  190: int *i, struct dma_fence *fence)
  192: fences[*i] = fence;
  194: if (!dma_fence_is_signaled(fence)) {
  195: dma_fence_get(fence);
  300: dma_fence_remove_callback(sync_file->fence, &sync_file->cb);
  301: dma_fence_put(sync_file->fence);
  315: if (dma_fence_add_callback(sync_file->fence, &sync_file->cb,
  320: return dma_fence_is_signaled(sync_file->fence) ? EPOLLIN : 0;
  357: data.fence = fd;
  378: static int sync_fill_fence_info(struct dma_fence *fence,
  381: strlcpy(info->obj_name, fence->ops->get_timeline_name(fence),
  383: strlcpy(info->driver_name, fence->ops->get_driver_name(fence),
  386: info->status = dma_fence_get_status(fence);
  387: while (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags) &&
  388: !test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags))
  391: test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags) ?
  392: ktime_to_ns(fence->timestamp) :
  422: info.status = dma_fence_get_status(sync_file->fence);
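sync_file.c wraps a single dma_fence in a file so userspace can poll it and pass it between processes; sync_file_create() at line 64 takes its own reference on the fence. A sketch of the common export pattern follows (compare the sync_file_create() call in amdgpu_cs.c further down); my_export_fence_fd() is an illustrative name and the error handling is an assumption, not code from the listing.

#include <linux/sync_file.h>
#include <linux/file.h>
#include <linux/fcntl.h>

/* Export "fence" to userspace as a pollable sync_file fd. */
static int my_export_fence_fd(struct dma_fence *fence)
{
	struct sync_file *sync_file;
	int fd;

	fd = get_unused_fd_flags(O_CLOEXEC);
	if (fd < 0)
		return fd;

	sync_file = sync_file_create(fence);	/* takes its own fence reference */
	if (!sync_file) {
		put_unused_fd(fd);
		return -ENOMEM;
	}

	fd_install(fd, sync_file->file);
	return fd;	/* hand this fd back to userspace */
}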
drivers/dma/ioat/hw.h
  80: unsigned int fence:1;
  114: unsigned int fence:1;
  161: unsigned int fence:1;
  210: unsigned int fence:1;

drivers/dma/ioat/prep.c
  142: hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE);
  227: xor->ctl_f.fence = !!(flags & DMA_PREP_FENCE);
  437: pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE);
  551: pq->ctl_f.fence = !!(flags & DMA_PREP_FENCE);
  723: hw->ctl_f.fence = !!(flags & DMA_PREP_FENCE);

drivers/gpu/drm/amd/amdgpu/amdgpu.h
  373: struct dma_fence *fence;
  475: struct dma_fence *fence;

drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h
  255: struct dma_fence *fence);

drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_fence.c
  65: struct amdgpu_amdkfd_fence *fence;
  67: fence = kzalloc(sizeof(*fence), GFP_KERNEL);
  68: if (fence == NULL)
  73: fence->mm = mm;
  74: get_task_comm(fence->timeline_name, current);
  75: spin_lock_init(&fence->lock);
  77: dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock,
  80: return fence;
  85: struct amdgpu_amdkfd_fence *fence;
  90: fence = container_of(f, struct amdgpu_amdkfd_fence, base);
  91: if (fence && f->ops == &amdkfd_fence_ops)
  92: return fence;
  104: struct amdgpu_amdkfd_fence *fence = to_amdgpu_amdkfd_fence(f);
  106: return fence->timeline_name;
  117: struct amdgpu_amdkfd_fence *fence = to_amdgpu_amdkfd_fence(f);
  119: if (!fence)
  125: if (!kgd2kfd_schedule_evict_and_restore_process(fence->mm, f))
  141: struct amdgpu_amdkfd_fence *fence = to_amdgpu_amdkfd_fence(f);
  146: if (WARN_ON(!fence))
  149: mmdrop(fence->mm);
  162: struct amdgpu_amdkfd_fence *fence = to_amdgpu_amdkfd_fence(f);
  164: if (!fence)
  166: else if (fence->mm == mm)

drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
  257: RCU_INIT_POINTER(resv->fence, new);

drivers/gpu/drm/amd/amdgpu/amdgpu_benchmark.c
  36: struct dma_fence *fence = NULL;
  42: r = amdgpu_copy_buffer(ring, saddr, daddr, size, NULL, &fence,
  46: r = dma_fence_wait(fence, false);
  49: dma_fence_put(fence);
  55: if (fence)
  56: dma_fence_put(fence);

drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
  763: dma_fence_put(parser->fence);
  1029: struct dma_fence *fence;
  1043: fence = amdgpu_ctx_get_fence(ctx, entity, deps[i].handle);
  1046: if (IS_ERR(fence))
  1047: return PTR_ERR(fence);
  1048: else if (!fence)
  1053: struct dma_fence *old = fence;
  1055: s_fence = to_drm_sched_fence(fence);
  1056: fence = dma_fence_get(&s_fence->scheduled);
  1060: r = amdgpu_sync_fence(p->adev, &p->job->sync, fence, true);
  1061: dma_fence_put(fence);
  1072: struct dma_fence *fence;
  1075: r = drm_syncobj_find_fence(p->filp, handle, point, flags, &fence);
  1082: r = amdgpu_sync_fence(p->adev, &p->job->sync, fence, true);
  1083: dma_fence_put(fence);
  1262: p->fence, p->post_deps[i].point);
  1266: p->fence);
  1310: p->fence = dma_fence_get(&job->base.s_fence->finished);
  1312: amdgpu_ctx_add_fence(p->ctx, entity, p->fence, &seq);
  1336: ttm_eu_fence_buffer_objects(&p->ticket, &p->validated, p->fence);
  1422: struct dma_fence *fence;
  1436: fence = amdgpu_ctx_get_fence(ctx, entity, wait->in.handle);
  1437: if (IS_ERR(fence))
  1438: r = PTR_ERR(fence);
  1439: else if (fence) {
  1440: r = dma_fence_wait_timeout(fence, true, timeout);
  1441: if (r > 0 && fence->error)
  1442: r = fence->error;
  1443: dma_fence_put(fence);
  1470: struct dma_fence *fence;
  1484: fence = amdgpu_ctx_get_fence(ctx, entity, user->seq_no);
  1487: return fence;
  1495: struct dma_fence *fence;
  1500: fence = amdgpu_cs_get_fence(adev, filp, &info->in.fence);
  1501: if (IS_ERR(fence))
  1502: return PTR_ERR(fence);
  1504: if (!fence)
  1505: fence = dma_fence_get_stub();
  1509: r = drm_syncobj_create(&syncobj, 0, fence);
  1510: dma_fence_put(fence);
  1518: r = drm_syncobj_create(&syncobj, 0, fence);
  1519: dma_fence_put(fence);
  1529: dma_fence_put(fence);
  1533: sync_file = sync_file_create(fence);
  1534: dma_fence_put(fence);
  1567: struct dma_fence *fence;
  1570: fence = amdgpu_cs_get_fence(adev, filp, &fences[i]);
  1571: if (IS_ERR(fence))
  1572: return PTR_ERR(fence);
  1573: else if (!fence)
  1576: r = dma_fence_wait_timeout(fence, true, timeout);
  1577: dma_fence_put(fence);
  1584: if (fence->error)
  1585: return fence->error;
  1621: struct dma_fence *fence;
  1623: fence = amdgpu_cs_get_fence(adev, filp, &fences[i]);
  1624: if (IS_ERR(fence)) {
  1625: r = PTR_ERR(fence);
  1627: } else if (fence) {
  1628: array[i] = fence;

drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
  468: struct dma_fence *fence, uint64_t* handle)
  480: dma_fence_get(fence);
  483: centity->fences[idx] = fence;
  497: struct dma_fence *fence;
  515: fence = dma_fence_get(centity->fences[seq & (amdgpu_sched_jobs - 1)]);
  518: return fence;

drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h
  72: struct dma_fence *fence, uint64_t *seq);

drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
  945: struct dma_fence *fence, **ptr;
  951: fence = rcu_dereference_protected(*ptr, 1);
  954: if (!fence)
  957: fences[last_seq] = fence;
  966: struct dma_fence *fence;
  969: fence = fences[i];
  970: if (!fence)
  972: dma_fence_signal(fence);
  973: dma_fence_put(fence);
  980: struct dma_fence *fence;
  984: fence = sched->ops->run_job(s_job);
  985: dma_fence_put(fence);
  995: struct dma_fence *fence, **ptr;
  1008: fence = rcu_dereference_protected(*ptr, 1);
  1013: if (job->fence == fence)

drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
  3394: struct dma_fence *fence = NULL, *next = NULL;
  3417: if (fence) {
  3418: tmo = dma_fence_wait_timeout(fence, false, tmo);
  3419: dma_fence_put(fence);
  3420: fence = next;
  3429: fence = next;
  3434: if (fence)
  3435: tmo = dma_fence_wait_timeout(fence, false, tmo);
  3436: dma_fence_put(fence);

drivers/gpu/drm/amd/amdgpu/amdgpu_display.c
  56: struct dma_fence *fence= *f;
  58: if (fence == NULL)
  63: if (!dma_fence_add_callback(fence, &work->cb,
  67: dma_fence_put(fence);

drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
  140: struct amdgpu_fence *fence;
  145: fence = kmem_cache_alloc(amdgpu_fence_slab, GFP_KERNEL);
  146: if (fence == NULL)
  150: fence->ring = ring;
  151: dma_fence_init(&fence->base, &amdgpu_fence_ops,
  177: rcu_assign_pointer(*ptr, dma_fence_get(&fence->base));
  179: *f = &fence->base;
  257: struct dma_fence *fence, **ptr;
  264: fence = rcu_dereference_protected(*ptr, 1);
  267: if (!fence)
  270: r = dma_fence_signal(fence);
  272: DMA_FENCE_TRACE(fence, "signaled from irq context\n");
  276: dma_fence_put(fence);
  310: struct dma_fence *fence, **ptr;
  318: fence = rcu_dereference(*ptr);
  319: if (!fence || !dma_fence_get_rcu(fence)) {
  325: r = dma_fence_wait(fence, false);
  326: dma_fence_put(fence);
  627: static const char *amdgpu_fence_get_driver_name(struct dma_fence *fence)
  634: struct amdgpu_fence *fence = to_amdgpu_fence(f);
  635: return (const char *)fence->ring->name;
  648: struct amdgpu_fence *fence = to_amdgpu_fence(f);
  649: struct amdgpu_ring *ring = fence->ring;
  654: DMA_FENCE_TRACE(&fence->base, "armed on ring %i!\n", ring->idx);
  669: struct amdgpu_fence *fence = to_amdgpu_fence(f);
  670: kmem_cache_free(amdgpu_fence_slab, fence);
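amdgpu_fence.c backs each ring with a slab-allocated amdgpu_fence and signals it from the fence interrupt, while amdgpu_display.c (lines 56-67 above) shows the consumer side: register a callback, and fall back to running the work directly when dma_fence_add_callback() reports the fence is already signaled (-ENOENT). A sketch of that consumer pattern follows; my_flip_work, my_flip_cb() and my_queue_after_fence() are illustrative names, not code from the listing.

#include <linux/dma-fence.h>
#include <linux/workqueue.h>

struct my_flip_work {
	struct work_struct work;	/* deferred follow-up work */
	struct dma_fence_cb cb;
};

static void my_flip_cb(struct dma_fence *fence, struct dma_fence_cb *cb)
{
	struct my_flip_work *flip = container_of(cb, struct my_flip_work, cb);

	/* Runs in the signaler's context (often irq): keep it short. */
	dma_fence_put(fence);		/* consume the caller's reference */
	schedule_work(&flip->work);
}

/* Caller passes ownership of one fence reference. */
static void my_queue_after_fence(struct my_flip_work *flip,
				 struct dma_fence *fence)
{
	/* Non-zero return means the fence already signaled; run now. */
	if (dma_fence_add_callback(fence, &flip->cb, my_flip_cb)) {
		dma_fence_put(fence);
		schedule_work(&flip->work);
	}
}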
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c amdgpu_bo_fence(bo, fence, true); fence 199 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c dma_fence_put(fence); fence 88 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c static void amdgpu_pasid_free_cb(struct dma_fence *fence, fence 95 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c dma_fence_put(fence); fence 110 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c struct dma_fence *fence, **fences; fence 125 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c fence = fences[0]; fence 137 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c fence = &array->base; fence 143 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c dma_fence_wait(fence, false); fence 144 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c dma_fence_put(fence); fence 148 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c if (dma_fence_add_callback(fence, &cb->cb, fence 150 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c amdgpu_pasid_free_cb(fence, &cb->cb); fence 268 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c struct dma_fence *fence, fence 306 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_sync_fence(ring->adev, &(*id)->active, fence, false); fence 333 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c struct dma_fence *fence, fence 378 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_sync_fence(ring->adev, &(*id)->active, fence, false); fence 407 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c struct amdgpu_sync *sync, struct dma_fence *fence, fence 423 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_vmid_grab_reserved(vm, ring, sync, fence, job, &id); fence 427 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = amdgpu_vmid_grab_used(vm, ring, sync, fence, job, &id); fence 439 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c fence, false); fence 87 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.h struct amdgpu_sync *sync, struct dma_fence *fence, fence 112 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c f = job->base.s_fence ? &job->base.s_fence->finished : job->fence; fence 126 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c dma_fence_put(job->fence); fence 136 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c dma_fence_put(job->fence); fence 169 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c struct dma_fence **fence) fence 174 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c r = amdgpu_ib_schedule(ring, job->num_ibs, job->ibs, NULL, fence); fence 175 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c job->fence = dma_fence_get(*fence); fence 189 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c struct dma_fence *fence; fence 193 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c fence = amdgpu_sync_get_fence(&job->sync, &explicit); fence 194 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c if (fence && explicit) { fence 195 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c if (drm_sched_dependency_optimized(fence, s_entity)) { fence 197 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c fence, false); fence 203 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c while (fence == NULL && vm && !job->vmid) { fence 210 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c fence = amdgpu_sync_get_fence(&job->sync, NULL); fence 213 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c return fence; fence 219 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c struct dma_fence *fence = NULL, *finished; fence 237 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c &fence); fence 242 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c dma_fence_put(job->fence); fence 243 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c job->fence = dma_fence_get(fence); fence 247 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c fence = r ? 
ERR_PTR(r) : fence; fence 248 drivers/gpu/drm/amd/amdgpu/amdgpu_job.c return fence; fence 48 drivers/gpu/drm/amd/amdgpu/amdgpu_job.h struct dma_fence *fence; /* the hw fence */ fence 78 drivers/gpu/drm/amd/amdgpu/amdgpu_job.h struct dma_fence **fence); fence 591 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c struct dma_fence *fence; fence 593 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence); fence 597 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c amdgpu_bo_fence(bo, fence, false); fence 599 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c bo->tbo.moving = dma_fence_get(fence); fence 600 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_fence_put(fence); fence 740 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c int amdgpu_bo_restore_shadow(struct amdgpu_bo *shadow, struct dma_fence **fence) fence 751 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c amdgpu_bo_size(shadow), NULL, fence, fence 1292 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c struct dma_fence *fence = NULL; fence 1310 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c r = amdgpu_fill_buffer(abo, AMDGPU_POISON, bo->base.resv, &fence); fence 1312 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c amdgpu_bo_fence(abo, fence, false); fence 1313 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_fence_put(fence); fence 1388 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c void amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, fence 1394 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_add_shared_fence(resv, fence); fence 1396 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_add_excl_fence(resv, fence); fence 272 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h void amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, fence 278 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h struct dma_fence **fence); fence 308 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h struct dma_fence *fence); fence 409 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c struct dma_fence *fence) fence 413 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c if (amdgpu_sriov_vf(ring->adev) || !ring->funcs->soft_recovery || !fence) fence 417 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c while (!dma_fence_is_signaled(fence) && fence 421 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c return dma_fence_is_signaled(fence); fence 97 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h int amdgpu_fence_emit(struct amdgpu_ring *ring, struct dma_fence **fence, fence 273 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h struct dma_fence *fence); fence 110 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c dma_fence_put(sa_bo->fence); fence 123 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (sa_bo->fence == NULL || fence 124 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c !dma_fence_is_signaled(sa_bo->fence)) { fence 237 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (!dma_fence_is_signaled(sa_bo->fence)) { fence 238 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c fences[i] = sa_bo->fence; fence 261 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c uint32_t idx = best_bo->fence->context; fence 295 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c (*sa_bo)->fence = NULL; fence 347 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c struct dma_fence *fence) fence 357 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (fence && !dma_fence_is_signaled(fence)) { fence 360 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c (*sa_bo)->fence = dma_fence_get(fence); fence 361 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c idx = fence->context % AMDGPU_SA_NUM_FENCE_LISTS; fence 390 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c if (i->fence) fence 392 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c i->fence->seqno, i->fence->context); fence 37 
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c struct dma_fence *fence; fence 114 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c struct dma_fence *fence) fence 116 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c if (*keep && dma_fence_is_later(*keep, fence)) fence 120 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c *keep = dma_fence_get(fence); fence 137 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c if (unlikely(e->fence->context != f->context)) fence 140 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c amdgpu_sync_keep_later(&e->fence, f); fence 178 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c e->fence = dma_fence_get(f); fence 266 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c struct dma_fence *f = e->fence; fence 309 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c f = e->fence; fence 341 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c f = e->fence; fence 366 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c r = dma_fence_wait(e->fence, intr); fence 371 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c dma_fence_put(e->fence); fence 393 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c dma_fence_put(e->fence); fence 89 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c struct dma_fence *fence = NULL; fence 127 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c size, NULL, &fence, false, false); fence 134 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c r = dma_fence_wait(fence, false); fence 140 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c dma_fence_put(fence); fence 141 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c fence = NULL; fence 173 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c size, NULL, &fence, false, false); fence 180 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c r = dma_fence_wait(fence, false); fence 186 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c dma_fence_put(fence); fence 187 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c fence = NULL; fence 233 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c if (fence) fence 234 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c dma_fence_put(fence); fence 172 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __field(struct dma_fence *, fence) fence 468 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h TP_PROTO(struct amdgpu_job *sched_job, struct dma_fence *fence), fence 469 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h TP_ARGS(sched_job, fence), fence 473 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __field(struct dma_fence *, fence) fence 481 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->fence = fence; fence 482 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->ctx = fence->context; fence 483 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->seqno = fence->seqno; fence 487 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->fence, __entry->ctx, fence 314 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_fence *fence = NULL; fence 383 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_put(fence); fence 384 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c fence = next; fence 414 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c *f = dma_fence_get(fence); fence 415 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_put(fence); fence 432 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_fence *fence = NULL; fence 444 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c bo->base.resv, &fence); fence 459 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_put(fence); fence 460 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c fence = wipe_fence; fence 466 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = ttm_bo_move_accel_cleanup(bo, fence, true, new_mem); fence 468 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = ttm_bo_pipeline_move(bo, fence, evict, new_mem); fence 469 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_put(fence); fence 473 
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c if (fence) fence 474 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_wait(fence, false); fence 475 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_put(fence); fence 1909 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_fence *fence; fence 1950 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c AMDGPU_FENCE_OWNER_UNDEFINED, &fence); fence 1954 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_fence_put(fence); fence 1966 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_fence **fence, bool direct_submit, fence 2022 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = amdgpu_job_submit_direct(job, ring, fence); fence 2025 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c AMDGPU_FENCE_OWNER_UNDEFINED, fence); fence 2040 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_fence **fence) fence 2118 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c AMDGPU_FENCE_OWNER_UNDEFINED, fence); fence 89 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct dma_fence **fence, bool direct_submit, fence 100 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct dma_fence **fence); fence 428 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c struct dma_fence *fence; fence 431 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c &fence); fence 437 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c dma_fence_wait(fence, false); fence 438 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c dma_fence_put(fence); fence 1020 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c bool direct, struct dma_fence **fence) fence 1103 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c if (fence) fence 1104 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c *fence = dma_fence_get(f); fence 1122 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c struct dma_fence **fence) fence 1150 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c return amdgpu_uvd_send_msg(ring, bo, true, fence); fence 1154 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c bool direct, struct dma_fence **fence) fence 1175 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c return amdgpu_uvd_send_msg(ring, bo, direct, fence); fence 1246 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c struct dma_fence *fence; fence 1253 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c r = amdgpu_uvd_get_destroy_msg(ring, 1, true, &fence); fence 1257 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c r = dma_fence_wait_timeout(fence, false, timeout); fence 1263 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c dma_fence_put(fence); fence 78 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h struct dma_fence **fence); fence 80 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.h bool direct, struct dma_fence **fence); fence 433 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c struct dma_fence **fence) fence 491 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c if (fence) fence 492 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c *fence = dma_fence_get(f); fence 512 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c bool direct, struct dma_fence **fence) fence 555 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c if (fence) fence 556 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c *fence = dma_fence_get(f); fence 1113 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c struct dma_fence *fence = NULL; fence 1131 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c r = amdgpu_vce_get_destroy_msg(ring, 1, true, &fence); fence 1135 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c r = dma_fence_wait_timeout(fence, false, timeout); fence 1142 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c dma_fence_put(fence); fence 63 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h struct dma_fence **fence); fence 65 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.h bool direct, struct dma_fence **fence); fence 291 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c unsigned int fences = 0, fence[AMDGPU_MAX_VCN_INSTANCES] = 
{0}; fence 298 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c fence[j] += amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_enc[i]); fence 304 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c if (fence[j]) fence 317 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c fence[j] += amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_jpeg); fence 318 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c fence[j] += amdgpu_fence_count_emitted(&adev->vcn.inst[j].ring_dec); fence 319 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c fences += fence[j]; fence 409 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence **fence) fence 444 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c if (fence) fence 445 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c *fence = dma_fence_get(f); fence 460 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence **fence) fence 490 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c return amdgpu_vcn_dec_send_msg(ring, bo, fence); fence 494 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence **fence) fence 516 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c return amdgpu_vcn_dec_send_msg(ring, bo, fence); fence 521 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence *fence; fence 528 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c r = amdgpu_vcn_dec_get_destroy_msg(ring, 1, &fence); fence 532 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c r = dma_fence_wait_timeout(fence, false, timeout); fence 538 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c dma_fence_put(fence); fence 573 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence **fence) fence 613 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c if (fence) fence 614 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c *fence = dma_fence_get(f); fence 626 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence **fence) fence 666 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c if (fence) fence 667 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c *fence = dma_fence_get(f); fence 679 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence *fence = NULL; fence 693 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c r = amdgpu_vcn_enc_get_destroy_msg(ring, 1, bo, &fence); fence 697 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c r = dma_fence_wait_timeout(fence, false, timeout); fence 704 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c dma_fence_put(fence); fence 740 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence **fence) fence 767 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c if (fence) fence 768 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c *fence = dma_fence_get(f); fence 783 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c struct dma_fence *fence = NULL; fence 786 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c r = amdgpu_vcn_jpeg_set_reg(ring, 1, &fence); fence 790 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c r = dma_fence_wait_timeout(fence, false, timeout); fence 810 drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c dma_fence_put(fence); fence 1037 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_fence *fence = NULL; fence 1078 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_fence_emit(ring, &fence, 0); fence 1086 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c id->last_flush = dma_fence_get(fence); fence 1096 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c id->pasid_mapping = dma_fence_get(fence); fence 1099 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_fence_put(fence); fence 1511 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_fence **fence) fence 1534 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return vm->update_funcs->commit(¶ms, fence); fence 1564 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_fence **fence) fence 1653 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c fence); fence 1819 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 
static void amdgpu_vm_prt_cb(struct dma_fence *fence, struct dma_fence_cb *_cb) fence 1834 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_fence *fence) fence 1844 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (fence) fence 1845 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_fence_wait(fence, false); fence 1850 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!fence || dma_fence_add_callback(fence, &cb->cb, fence 1852 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_prt_cb(fence, &cb->cb); fence 1869 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_fence *fence) fence 1872 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_add_prt_cb(adev, fence); fence 1931 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_fence **fence) fence 1957 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (fence && f) { fence 1958 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_fence_put(*fence); fence 1959 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c *fence = f; fence 227 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct dma_fence **fence); fence 364 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct dma_fence **fence); fence 114 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_cpu.c struct dma_fence **fence) fence 94 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c struct dma_fence **fence) fence 113 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c if (fence) fence 114 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c swap(*fence, f); fence 310 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c struct dma_fence *fence; fence 350 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c AMDGPU_FENCE_OWNER_UNDEFINED, &fence); fence 356 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c dma_fence_wait(fence, false); fence 357 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c dma_fence_put(fence); fence 210 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c struct dma_fence **fence) fence 250 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c if (fence) fence 251 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c *fence = dma_fence_get(f); fence 273 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c struct dma_fence **fence) fence 313 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c if (fence) fence 314 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c *fence = dma_fence_get(f); fence 331 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c struct dma_fence *fence = NULL; fence 345 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c r = uvd_v6_0_enc_get_destroy_msg(ring, 1, bo, &fence); fence 349 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c r = dma_fence_wait_timeout(fence, false, timeout); fence 356 drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c dma_fence_put(fence); fence 218 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c struct dma_fence **fence) fence 258 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c if (fence) fence 259 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c *fence = dma_fence_get(f); fence 280 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c struct dma_fence **fence) fence 320 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c if (fence) fence 321 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c *fence = dma_fence_get(f); fence 338 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c struct dma_fence *fence = NULL; fence 352 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c r = uvd_v7_0_enc_get_destroy_msg(ring, 1, bo, &fence); fence 356 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c r = dma_fence_wait_timeout(fence, false, timeout); fence 363 drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c dma_fence_put(fence); fence 925 drivers/gpu/drm/amd/amdkfd/kfd_device.c struct dma_fence *fence) fence 931 drivers/gpu/drm/amd/amdkfd/kfd_device.c if (!fence) fence 934 drivers/gpu/drm/amd/amdkfd/kfd_device.c if (dma_fence_is_signaled(fence)) fence 941 drivers/gpu/drm/amd/amdkfd/kfd_device.c if (fence->seqno == p->last_eviction_seqno) fence 944 
fence 944 drivers/gpu/drm/amd/amdkfd/kfd_device.c p->last_eviction_seqno = fence->seqno;
fence 1393 drivers/gpu/drm/drm_atomic_helper.c if (!new_plane_state->fence)
fence 1403 drivers/gpu/drm/drm_atomic_helper.c ret = dma_fence_wait(new_plane_state->fence, pre_swap);
fence 1407 drivers/gpu/drm/drm_atomic_helper.c dma_fence_put(new_plane_state->fence);
fence 1408 drivers/gpu/drm/drm_atomic_helper.c new_plane_state->fence = NULL;
fence 1685 drivers/gpu/drm/drm_atomic_helper.c if (new_plane_state->fence)
fence 270 drivers/gpu/drm/drm_atomic_state_helper.c state->fence = NULL;
fence 312 drivers/gpu/drm/drm_atomic_state_helper.c if (state->fence)
fence 313 drivers/gpu/drm/drm_atomic_state_helper.c dma_fence_put(state->fence);
fence 270 drivers/gpu/drm/drm_atomic_uapi.c struct dma_fence *fence)
fence 272 drivers/gpu/drm/drm_atomic_uapi.c if (plane_state->fence) {
fence 273 drivers/gpu/drm/drm_atomic_uapi.c dma_fence_put(fence);
fence 277 drivers/gpu/drm/drm_atomic_uapi.c plane_state->fence = fence;
fence 530 drivers/gpu/drm/drm_atomic_uapi.c if (state->fence)
fence 536 drivers/gpu/drm/drm_atomic_uapi.c state->fence = sync_file_get_fence(val);
fence 537 drivers/gpu/drm/drm_atomic_uapi.c if (!state->fence)
fence 1087 drivers/gpu/drm/drm_atomic_uapi.c struct dma_fence *fence)
fence 1096 drivers/gpu/drm/drm_atomic_uapi.c fence_state->sync_file = sync_file_create(fence);
fence 1150 drivers/gpu/drm/drm_atomic_uapi.c struct dma_fence *fence;
fence 1163 drivers/gpu/drm/drm_atomic_uapi.c fence = drm_crtc_create_fence(crtc);
fence 1164 drivers/gpu/drm/drm_atomic_uapi.c if (!fence)
fence 1167 drivers/gpu/drm/drm_atomic_uapi.c ret = setup_out_fence(&f[(*num_fences)++], fence);
fence 1169 drivers/gpu/drm/drm_atomic_uapi.c dma_fence_put(fence);
fence 1173 drivers/gpu/drm/drm_atomic_uapi.c crtc_state->event->base.fence = fence;
fence 1182 drivers/gpu/drm/drm_atomic_uapi.c struct dma_fence *fence;
fence 1203 drivers/gpu/drm/drm_atomic_uapi.c fence = drm_writeback_get_out_fence(wb_conn);
fence 1204 drivers/gpu/drm/drm_atomic_uapi.c if (!fence)
fence 1207 drivers/gpu/drm/drm_atomic_uapi.c ret = setup_out_fence(&f[(*num_fences)++], fence);
fence 1209 drivers/gpu/drm/drm_atomic_uapi.c dma_fence_put(fence);
fence 1213 drivers/gpu/drm/drm_atomic_uapi.c conn_state->writeback_job->out_fence = fence;
fence 1252 drivers/gpu/drm/drm_atomic_uapi.c if (event && (event->base.fence || event->base.file_priv)) {
fence 168 drivers/gpu/drm/drm_crtc.c static struct drm_crtc *fence_to_crtc(struct dma_fence *fence)
fence 170 drivers/gpu/drm/drm_crtc.c BUG_ON(fence->ops != &drm_crtc_fence_ops);
fence 171 drivers/gpu/drm/drm_crtc.c return container_of(fence->lock, struct drm_crtc, fence_lock);
fence 174 drivers/gpu/drm/drm_crtc.c static const char *drm_crtc_fence_get_driver_name(struct dma_fence *fence)
fence 176 drivers/gpu/drm/drm_crtc.c struct drm_crtc *crtc = fence_to_crtc(fence);
fence 181 drivers/gpu/drm/drm_crtc.c static const char *drm_crtc_fence_get_timeline_name(struct dma_fence *fence)
fence 183 drivers/gpu/drm/drm_crtc.c struct drm_crtc *crtc = fence_to_crtc(fence);
fence 195 drivers/gpu/drm/drm_crtc.c struct dma_fence *fence;
fence 197 drivers/gpu/drm/drm_crtc.c fence = kzalloc(sizeof(*fence), GFP_KERNEL);
fence 198 drivers/gpu/drm/drm_crtc.c if (!fence)
fence 201 drivers/gpu/drm/drm_crtc.c dma_fence_init(fence, &drm_crtc_fence_ops, &crtc->fence_lock,
fence 204 drivers/gpu/drm/drm_crtc.c return fence;
fence 685 drivers/gpu/drm/drm_file.c if (p->fence)
fence 686 drivers/gpu/drm/drm_file.c dma_fence_put(p->fence);
fence 716 drivers/gpu/drm/drm_file.c if (e->fence) {
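The drm_crtc.c entries above (fence 195-204) show the stock recipe for minting an out-fence: allocate a bare struct dma_fence and bind it to the CRTC's spinlock and timeline context with dma_fence_init(). A sketch of that call site follows; the crtc->fence_context and crtc->fence_seqno fields are assumptions inferred from the visible arguments, and example_crtc_create_fence() is a hypothetical name.

/*
 * Minimal out-fence constructor in the style of drm_crtc.c: the ops
 * table (drm_crtc_fence_ops, per the listing) supplies the driver and
 * timeline names, and fence_to_crtc() recovers the CRTC via fence->lock.
 */
static struct dma_fence *example_crtc_create_fence(struct drm_crtc *crtc)
{
	struct dma_fence *fence;

	fence = kzalloc(sizeof(*fence), GFP_KERNEL);
	if (!fence)
		return NULL;

	dma_fence_init(fence, &drm_crtc_fence_ops, &crtc->fence_lock,
		       crtc->fence_context, ++crtc->fence_seqno);

	return fence;
}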
fence 717 drivers/gpu/drm/drm_file.c dma_fence_signal(e->fence);
fence 718 drivers/gpu/drm/drm_file.c dma_fence_put(e->fence);
fence 1357 drivers/gpu/drm/drm_gem.c struct dma_fence *fence)
fence 1364 drivers/gpu/drm/drm_gem.c if (!fence)
fence 1372 drivers/gpu/drm/drm_gem.c if (entry->context != fence->context)
fence 1375 drivers/gpu/drm/drm_gem.c if (dma_fence_is_later(fence, entry)) {
fence 1377 drivers/gpu/drm/drm_gem.c xa_store(fence_array, index, fence, GFP_KERNEL);
fence 1379 drivers/gpu/drm/drm_gem.c dma_fence_put(fence);
fence 1384 drivers/gpu/drm/drm_gem.c ret = xa_alloc(fence_array, &id, fence, xa_limit_32b, GFP_KERNEL);
fence 1386 drivers/gpu/drm/drm_gem.c dma_fence_put(fence);
fence 1415 drivers/gpu/drm/drm_gem.c struct dma_fence *fence =
fence 1418 drivers/gpu/drm/drm_gem.c return drm_gem_fence_array_add(fence_array, fence);
fence 291 drivers/gpu/drm/drm_gem_framebuffer_helper.c struct dma_fence *fence;
fence 297 drivers/gpu/drm/drm_gem_framebuffer_helper.c fence = dma_resv_get_excl_rcu(obj->resv);
fence 298 drivers/gpu/drm/drm_gem_framebuffer_helper.c drm_atomic_set_fence_for_plane(state, fence);
fence 144 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence;
fence 181 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence;
fence 183 drivers/gpu/drm/drm_syncobj.c if (wait->fence)
fence 191 drivers/gpu/drm/drm_syncobj.c fence = dma_fence_get(rcu_dereference_protected(syncobj->fence, 1));
fence 192 drivers/gpu/drm/drm_syncobj.c if (!fence || dma_fence_chain_find_seqno(&fence, wait->point)) {
fence 193 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence);
fence 195 drivers/gpu/drm/drm_syncobj.c } else if (!fence) {
fence 196 drivers/gpu/drm/drm_syncobj.c wait->fence = dma_fence_get_stub();
fence 198 drivers/gpu/drm/drm_syncobj.c wait->fence = fence;
fence 225 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence,
fence 231 drivers/gpu/drm/drm_syncobj.c dma_fence_get(fence);
fence 239 drivers/gpu/drm/drm_syncobj.c dma_fence_chain_init(chain, prev, fence, point);
fence 240 drivers/gpu/drm/drm_syncobj.c rcu_assign_pointer(syncobj->fence, &chain->base);
fence 247 drivers/gpu/drm/drm_syncobj.c dma_fence_chain_for_each(fence, prev);
fence 260 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence)
fence 265 drivers/gpu/drm/drm_syncobj.c if (fence)
fence 266 drivers/gpu/drm/drm_syncobj.c dma_fence_get(fence);
fence 270 drivers/gpu/drm/drm_syncobj.c old_fence = rcu_dereference_protected(syncobj->fence,
fence 272 drivers/gpu/drm/drm_syncobj.c rcu_assign_pointer(syncobj->fence, fence);
fence 274 drivers/gpu/drm/drm_syncobj.c if (fence != old_fence) {
fence 293 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence = dma_fence_get_stub();
fence 295 drivers/gpu/drm/drm_syncobj.c drm_syncobj_replace_fence(syncobj, fence);
fence 296 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence);
fence 318 drivers/gpu/drm/drm_syncobj.c struct dma_fence **fence)
fence 328 drivers/gpu/drm/drm_syncobj.c *fence = drm_syncobj_fence_get(syncobj);
fence 331 drivers/gpu/drm/drm_syncobj.c if (*fence) {
fence 332 drivers/gpu/drm/drm_syncobj.c ret = dma_fence_chain_find_seqno(fence, point);
fence 335 drivers/gpu/drm/drm_syncobj.c dma_fence_put(*fence);
fence 350 drivers/gpu/drm/drm_syncobj.c if (wait.fence) {
fence 368 drivers/gpu/drm/drm_syncobj.c *fence = wait.fence;
fence 406 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence)
fence 421 drivers/gpu/drm/drm_syncobj.c if (fence)
fence 422 drivers/gpu/drm/drm_syncobj.c drm_syncobj_replace_fence(syncobj, fence);
fence 594
drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence = sync_file_get_fence(fd); fence 597 drivers/gpu/drm/drm_syncobj.c if (!fence) fence 602 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 606 drivers/gpu/drm/drm_syncobj.c drm_syncobj_replace_fence(syncobj, fence); fence 607 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 616 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 623 drivers/gpu/drm/drm_syncobj.c ret = drm_syncobj_find_fence(file_private, handle, 0, 0, &fence); fence 627 drivers/gpu/drm/drm_syncobj.c sync_file = sync_file_create(fence); fence 629 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 768 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 778 drivers/gpu/drm/drm_syncobj.c &fence); fence 786 drivers/gpu/drm/drm_syncobj.c drm_syncobj_add_point(timeline_syncobj, chain, fence, args->dst_point); fence 788 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 800 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 807 drivers/gpu/drm/drm_syncobj.c args->src_point, args->flags, &fence); fence 810 drivers/gpu/drm/drm_syncobj.c drm_syncobj_replace_fence(binary_syncobj, fence); fence 811 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 838 drivers/gpu/drm/drm_syncobj.c static void syncobj_wait_fence_func(struct dma_fence *fence, fence 850 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 853 drivers/gpu/drm/drm_syncobj.c fence = rcu_dereference_protected(syncobj->fence, fence 855 drivers/gpu/drm/drm_syncobj.c dma_fence_get(fence); fence 856 drivers/gpu/drm/drm_syncobj.c if (!fence || dma_fence_chain_find_seqno(&fence, wait->point)) { fence 857 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 859 drivers/gpu/drm/drm_syncobj.c } else if (!fence) { fence 860 drivers/gpu/drm/drm_syncobj.c wait->fence = dma_fence_get_stub(); fence 862 drivers/gpu/drm/drm_syncobj.c wait->fence = fence; fence 877 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 906 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 910 drivers/gpu/drm/drm_syncobj.c fence = drm_syncobj_fence_get(syncobjs[i]); fence 911 drivers/gpu/drm/drm_syncobj.c if (!fence || dma_fence_chain_find_seqno(&fence, points[i])) { fence 912 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 921 drivers/gpu/drm/drm_syncobj.c if (fence) fence 922 drivers/gpu/drm/drm_syncobj.c entries[i].fence = fence; fence 924 drivers/gpu/drm/drm_syncobj.c entries[i].fence = dma_fence_get_stub(); fence 927 drivers/gpu/drm/drm_syncobj.c dma_fence_is_signaled(entries[i].fence)) { fence 956 drivers/gpu/drm/drm_syncobj.c fence = entries[i].fence; fence 957 drivers/gpu/drm/drm_syncobj.c if (!fence) fence 961 drivers/gpu/drm/drm_syncobj.c dma_fence_is_signaled(fence) || fence 963 drivers/gpu/drm/drm_syncobj.c dma_fence_add_callback(fence, fence 1000 drivers/gpu/drm/drm_syncobj.c dma_fence_remove_callback(entries[i].fence, fence 1002 drivers/gpu/drm/drm_syncobj.c dma_fence_put(entries[i].fence); fence 1325 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence = dma_fence_get_stub(); fence 1328 drivers/gpu/drm/drm_syncobj.c fence, points[i]); fence 1329 drivers/gpu/drm/drm_syncobj.c dma_fence_put(fence); fence 1368 drivers/gpu/drm/drm_syncobj.c struct dma_fence *fence; fence 1371 drivers/gpu/drm/drm_syncobj.c fence = drm_syncobj_fence_get(syncobjs[i]); fence 1372 drivers/gpu/drm/drm_syncobj.c chain = to_dma_fence_chain(fence); fence 1376 drivers/gpu/drm/drm_syncobj.c dma_fence_chain_for_each(iter, fence) { fence 1377 
drivers/gpu/drm/drm_syncobj.c if (iter->context != fence->context) { fence 85 drivers/gpu/drm/drm_writeback.c static const char *drm_writeback_fence_get_driver_name(struct dma_fence *fence) fence 88 drivers/gpu/drm/drm_writeback.c fence_to_wb_connector(fence); fence 94 drivers/gpu/drm/drm_writeback.c drm_writeback_fence_get_timeline_name(struct dma_fence *fence) fence 97 drivers/gpu/drm/drm_writeback.c fence_to_wb_connector(fence); fence 102 drivers/gpu/drm/drm_writeback.c static bool drm_writeback_fence_enable_signaling(struct dma_fence *fence) fence 403 drivers/gpu/drm/drm_writeback.c struct dma_fence *fence; fence 409 drivers/gpu/drm/drm_writeback.c fence = kzalloc(sizeof(*fence), GFP_KERNEL); fence 410 drivers/gpu/drm/drm_writeback.c if (!fence) fence 413 drivers/gpu/drm/drm_writeback.c dma_fence_init(fence, &drm_writeback_fence_ops, fence 417 drivers/gpu/drm/drm_writeback.c return fence; fence 373 drivers/gpu/drm/etnaviv/etnaviv_drv.c return etnaviv_gpu_wait_fence_interruptible(gpu, args->fence, fence 442 drivers/gpu/drm/etnaviv/etnaviv_gem.c static void etnaviv_gem_describe_fence(struct dma_fence *fence, fence 445 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (!test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) fence 448 drivers/gpu/drm/etnaviv/etnaviv_gem.c fence->ops->get_driver_name(fence), fence 449 drivers/gpu/drm/etnaviv/etnaviv_gem.c fence->ops->get_timeline_name(fence), fence 450 drivers/gpu/drm/etnaviv/etnaviv_gem.c fence->seqno); fence 458 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct dma_fence *fence; fence 467 drivers/gpu/drm/etnaviv/etnaviv_gem.c fobj = rcu_dereference(robj->fence); fence 472 drivers/gpu/drm/etnaviv/etnaviv_gem.c fence = rcu_dereference(fobj->shared[i]); fence 473 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_gem_describe_fence(fence, "Shared", m); fence 477 drivers/gpu/drm/etnaviv/etnaviv_gem.c fence = rcu_dereference(robj->fence_excl); fence 478 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (fence) fence 479 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_gem_describe_fence(fence, "Exclusive", m); fence 606 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c args->fence = submit->out_fence_id; fence 1009 drivers/gpu/drm/etnaviv/etnaviv_gpu.c static inline struct etnaviv_fence *to_etnaviv_fence(struct dma_fence *fence) fence 1011 drivers/gpu/drm/etnaviv/etnaviv_gpu.c return container_of(fence, struct etnaviv_fence, base); fence 1014 drivers/gpu/drm/etnaviv/etnaviv_gpu.c static const char *etnaviv_fence_get_driver_name(struct dma_fence *fence) fence 1019 drivers/gpu/drm/etnaviv/etnaviv_gpu.c static const char *etnaviv_fence_get_timeline_name(struct dma_fence *fence) fence 1021 drivers/gpu/drm/etnaviv/etnaviv_gpu.c struct etnaviv_fence *f = to_etnaviv_fence(fence); fence 1026 drivers/gpu/drm/etnaviv/etnaviv_gpu.c static bool etnaviv_fence_signaled(struct dma_fence *fence) fence 1028 drivers/gpu/drm/etnaviv/etnaviv_gpu.c struct etnaviv_fence *f = to_etnaviv_fence(fence); fence 1033 drivers/gpu/drm/etnaviv/etnaviv_gpu.c static void etnaviv_fence_release(struct dma_fence *fence) fence 1035 drivers/gpu/drm/etnaviv/etnaviv_gpu.c struct etnaviv_fence *f = to_etnaviv_fence(fence); fence 1137 drivers/gpu/drm/etnaviv/etnaviv_gpu.c struct dma_fence *fence; fence 1146 drivers/gpu/drm/etnaviv/etnaviv_gpu.c fence = idr_find(&gpu->fence_idr, id); fence 1147 drivers/gpu/drm/etnaviv/etnaviv_gpu.c if (fence) fence 1148 drivers/gpu/drm/etnaviv/etnaviv_gpu.c fence = dma_fence_get_rcu(fence); fence 1151 drivers/gpu/drm/etnaviv/etnaviv_gpu.c if (!fence) fence 1156 
drivers/gpu/drm/etnaviv/etnaviv_gpu.c ret = dma_fence_is_signaled(fence) ? 0 : -EBUSY; fence 1160 drivers/gpu/drm/etnaviv/etnaviv_gpu.c ret = dma_fence_wait_timeout(fence, true, remaining); fence 1168 drivers/gpu/drm/etnaviv/etnaviv_gpu.c dma_fence_put(fence); fence 1319 drivers/gpu/drm/etnaviv/etnaviv_gpu.c gpu->event[event[0]].fence = gpu_fence; fence 1406 drivers/gpu/drm/etnaviv/etnaviv_gpu.c struct dma_fence *fence; fence 1419 drivers/gpu/drm/etnaviv/etnaviv_gpu.c fence = gpu->event[event].fence; fence 1420 drivers/gpu/drm/etnaviv/etnaviv_gpu.c if (!fence) fence 1423 drivers/gpu/drm/etnaviv/etnaviv_gpu.c gpu->event[event].fence = NULL; fence 1434 drivers/gpu/drm/etnaviv/etnaviv_gpu.c if (fence_after(fence->seqno, gpu->completed_fence)) fence 1435 drivers/gpu/drm/etnaviv/etnaviv_gpu.c gpu->completed_fence = fence->seqno; fence 1436 drivers/gpu/drm/etnaviv/etnaviv_gpu.c dma_fence_signal(fence); fence 82 drivers/gpu/drm/etnaviv/etnaviv_gpu.h struct dma_fence *fence; fence 172 drivers/gpu/drm/etnaviv/etnaviv_gpu.h u32 fence, struct timespec *timeout); fence 25 drivers/gpu/drm/etnaviv/etnaviv_sched.c struct dma_fence *fence; fence 29 drivers/gpu/drm/etnaviv/etnaviv_sched.c fence = submit->in_fence; fence 32 drivers/gpu/drm/etnaviv/etnaviv_sched.c if (!dma_fence_is_signaled(fence)) fence 33 drivers/gpu/drm/etnaviv/etnaviv_sched.c return fence; fence 35 drivers/gpu/drm/etnaviv/etnaviv_sched.c dma_fence_put(fence); fence 43 drivers/gpu/drm/etnaviv/etnaviv_sched.c fence = bo->excl; fence 46 drivers/gpu/drm/etnaviv/etnaviv_sched.c if (!dma_fence_is_signaled(fence)) fence 47 drivers/gpu/drm/etnaviv/etnaviv_sched.c return fence; fence 49 drivers/gpu/drm/etnaviv/etnaviv_sched.c dma_fence_put(fence); fence 56 drivers/gpu/drm/etnaviv/etnaviv_sched.c fence = bo->shared[j]; fence 59 drivers/gpu/drm/etnaviv/etnaviv_sched.c if (!dma_fence_is_signaled(fence)) fence 60 drivers/gpu/drm/etnaviv/etnaviv_sched.c return fence; fence 62 drivers/gpu/drm/etnaviv/etnaviv_sched.c dma_fence_put(fence); fence 75 drivers/gpu/drm/etnaviv/etnaviv_sched.c struct dma_fence *fence = NULL; fence 78 drivers/gpu/drm/etnaviv/etnaviv_sched.c fence = etnaviv_gpu_submit(submit); fence 82 drivers/gpu/drm/etnaviv/etnaviv_sched.c return fence; fence 2149 drivers/gpu/drm/i915/display/intel_display.c if (ret == 0 && vma->fence) fence 14114 drivers/gpu/drm/i915/display/intel_display.c intel_atomic_commit_ready(struct i915_sw_fence *fence, fence 14118 drivers/gpu/drm/i915/display/intel_display.c container_of(fence, struct intel_atomic_state, commit_ready); fence 14274 drivers/gpu/drm/i915/display/intel_display.c struct dma_fence *fence) fence 14278 drivers/gpu/drm/i915/display/intel_display.c if (!dma_fence_is_i915(fence)) fence 14293 drivers/gpu/drm/i915/display/intel_display.c wait->request = to_request(dma_fence_get(fence)); fence 14402 drivers/gpu/drm/i915/display/intel_display.c if (new_state->fence) { /* explicit fencing */ fence 14404 drivers/gpu/drm/i915/display/intel_display.c new_state->fence, fence 14434 drivers/gpu/drm/i915/display/intel_display.c if (!new_state->fence) { /* implicit fencing */ fence 14435 drivers/gpu/drm/i915/display/intel_display.c struct dma_fence *fence; fence 14444 drivers/gpu/drm/i915/display/intel_display.c fence = dma_resv_get_excl_rcu(obj->base.resv); fence 14445 drivers/gpu/drm/i915/display/intel_display.c if (fence) { fence 14446 drivers/gpu/drm/i915/display/intel_display.c add_rps_boost_after_vblank(new_state->crtc, fence); fence 14447 drivers/gpu/drm/i915/display/intel_display.c 
dma_fence_put(fence); fence 14450 drivers/gpu/drm/i915/display/intel_display.c add_rps_boost_after_vblank(new_state->crtc, new_state->fence); fence 159 drivers/gpu/drm/i915/display/intel_fbc.c fbc_ctl |= params->vma->fence->id; fence 180 drivers/gpu/drm/i915/display/intel_fbc.c dpfc_ctl |= DPFC_CTL_FENCE_EN | params->vma->fence->id; fence 240 drivers/gpu/drm/i915/display/intel_fbc.c dpfc_ctl |= params->vma->fence->id; fence 244 drivers/gpu/drm/i915/display/intel_fbc.c params->vma->fence->id); fence 324 drivers/gpu/drm/i915/display/intel_fbc.c params->vma->fence->id); fence 696 drivers/gpu/drm/i915/display/intel_fbc.c if (WARN_ON(cache->flags & PLANE_HAS_FENCE && !cache->vma->fence)) fence 39 drivers/gpu/drm/i915/gem/i915_gem_busy.c __busy_set_if_active(const struct dma_fence *fence, u32 (*flag)(u16 id)) fence 51 drivers/gpu/drm/i915/gem/i915_gem_busy.c if (!dma_fence_is_i915(fence)) fence 55 drivers/gpu/drm/i915/gem/i915_gem_busy.c rq = container_of(fence, const struct i915_request, fence); fence 65 drivers/gpu/drm/i915/gem/i915_gem_busy.c busy_check_reader(const struct dma_fence *fence) fence 67 drivers/gpu/drm/i915/gem/i915_gem_busy.c return __busy_set_if_active(fence, __busy_read_flag); fence 71 drivers/gpu/drm/i915/gem/i915_gem_busy.c busy_check_writer(const struct dma_fence *fence) fence 73 drivers/gpu/drm/i915/gem/i915_gem_busy.c if (!fence) fence 76 drivers/gpu/drm/i915/gem/i915_gem_busy.c return __busy_set_if_active(fence, __busy_write_id); fence 120 drivers/gpu/drm/i915/gem/i915_gem_busy.c list = rcu_dereference(obj->base.resv->fence); fence 125 drivers/gpu/drm/i915/gem/i915_gem_busy.c struct dma_fence *fence = fence 128 drivers/gpu/drm/i915/gem/i915_gem_busy.c args->busy |= busy_check_reader(fence); fence 104 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c static const char *clear_pages_work_driver_name(struct dma_fence *fence) fence 109 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c static const char *clear_pages_work_timeline_name(struct dma_fence *fence) fence 114 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c static void clear_pages_work_release(struct dma_fence *fence) fence 116 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c struct clear_pages_work *w = container_of(fence, typeof(*w), dma); fence 140 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c static void clear_pages_dma_fence_cb(struct dma_fence *fence, fence 145 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c if (fence->error) fence 146 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c dma_fence_set_error(&w->dma, fence->error); fence 195 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c if (dma_fence_add_callback(&rq->fence, &w->cb, fence 243 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c clear_pages_work_notify(struct i915_sw_fence *fence, fence 246 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c struct clear_pages_work *w = container_of(fence, typeof(*w), wait); fence 406 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (vma->fence) fence 627 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (vma->fence) fence 2385 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct drm_i915_gem_exec_fence fence; fence 2388 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (__copy_from_user(&fence, user++, sizeof(fence))) { fence 2393 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (fence.flags & __I915_EXEC_FENCE_UNKNOWN_FLAGS) { fence 2398 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c syncobj = drm_syncobj_find(file, fence.handle); fence 2408 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c fences[n] = ptr_pack_bits(syncobj, 
fence.flags, 2); fence 2436 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct dma_fence *fence; fence 2443 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c fence = drm_syncobj_fence_get(syncobj); fence 2444 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (!fence) fence 2447 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_request_await_dma_fence(eb->request, fence); fence 2448 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c dma_fence_put(fence); fence 2461 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct dma_fence * const fence = &eb->request->fence; fence 2472 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c drm_syncobj_replace_fence(syncobj, fence); fence 2669 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c out_fence = sync_file_create(&eb.request->fence); fence 16 drivers/gpu/drm/i915/gem/i915_gem_fence.c stub_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) fence 18 drivers/gpu/drm/i915/gem/i915_gem_fence.c struct stub_fence *stub = container_of(fence, typeof(*stub), chain); fence 33 drivers/gpu/drm/i915/gem/i915_gem_fence.c static const char *stub_driver_name(struct dma_fence *fence) fence 38 drivers/gpu/drm/i915/gem/i915_gem_fence.c static const char *stub_timeline_name(struct dma_fence *fence) fence 43 drivers/gpu/drm/i915/gem/i915_gem_fence.c static void stub_release(struct dma_fence *fence) fence 45 drivers/gpu/drm/i915/gem/i915_gem_fence.c struct stub_fence *stub = container_of(fence, typeof(*stub), dma); fence 90 drivers/gpu/drm/i915/gem/i915_gem_fence.c struct dma_fence *fence) fence 92 drivers/gpu/drm/i915/gem/i915_gem_fence.c struct stub_fence *stub = container_of(fence, typeof(*stub), dma); fence 123 drivers/gpu/drm/i915/gem/i915_gem_object.h struct dma_fence *fence); fence 373 drivers/gpu/drm/i915/gem/i915_gem_object.h struct dma_fence *fence; fence 376 drivers/gpu/drm/i915/gem/i915_gem_object.h fence = dma_resv_get_excl_rcu(obj->base.resv); fence 379 drivers/gpu/drm/i915/gem/i915_gem_object.h if (fence && dma_fence_is_i915(fence) && !dma_fence_is_signaled(fence)) fence 380 drivers/gpu/drm/i915/gem/i915_gem_object.h engine = to_request(fence)->engine; fence 381 drivers/gpu/drm/i915/gem/i915_gem_object.h dma_fence_put(fence); fence 274 drivers/gpu/drm/i915/gem/i915_gem_tiling.c if (vma->fence) fence 275 drivers/gpu/drm/i915/gem/i915_gem_tiling.c vma->fence->dirty = true; fence 16 drivers/gpu/drm/i915/gem/i915_gem_wait.c i915_gem_object_wait_fence(struct dma_fence *fence, fence 22 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) fence 25 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (dma_fence_is_i915(fence)) fence 26 drivers/gpu/drm/i915/gem/i915_gem_wait.c return i915_request_wait(to_request(fence), flags, timeout); fence 28 drivers/gpu/drm/i915/gem/i915_gem_wait.c return dma_fence_wait_timeout(fence, fence 96 drivers/gpu/drm/i915/gem/i915_gem_wait.c static void __fence_set_priority(struct dma_fence *fence, fence 102 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (dma_fence_is_signaled(fence) || !dma_fence_is_i915(fence)) fence 105 drivers/gpu/drm/i915/gem/i915_gem_wait.c rq = to_request(fence); fence 116 drivers/gpu/drm/i915/gem/i915_gem_wait.c static void fence_set_priority(struct dma_fence *fence, fence 120 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (dma_fence_is_array(fence)) { fence 121 drivers/gpu/drm/i915/gem/i915_gem_wait.c struct dma_fence_array *array = to_dma_fence_array(fence); fence 127 drivers/gpu/drm/i915/gem/i915_gem_wait.c __fence_set_priority(fence, attr); fence 159 
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c vma->fence ? vma->fence->id : -1, tile->tiling, tile->stride, fence 83 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c return i915_seqno_passed(__hwsp_seqno(rq), rq->fence.seqno); fence 90 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c i915_seqno_passed(rq->fence.seqno, fence 91 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c list_next_entry(rq, signal_link)->fence.seqno)) fence 95 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c i915_seqno_passed(list_prev_entry(rq, signal_link)->fence.seqno, fence 96 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c rq->fence.seqno)) fence 103 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c __dma_fence_signal(struct dma_fence *fence) fence 105 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c return !test_and_set_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags); fence 109 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c __dma_fence_signal__timestamp(struct dma_fence *fence, ktime_t timestamp) fence 111 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c fence->timestamp = timestamp; fence 112 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c set_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags); fence 113 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c trace_dma_fence_signaled(fence); fence 117 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c __dma_fence_signal__notify(struct dma_fence *fence, fence 122 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c lockdep_assert_held(fence->lock); fence 127 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c cur->func(fence, cur); fence 157 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c &rq->fence.flags)); fence 158 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c clear_bit(I915_FENCE_FLAG_SIGNAL, &rq->fence.flags); fence 160 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c if (!__dma_fence_signal(&rq->fence)) fence 193 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c list_replace(&rq->fence.cb_list, &cb_list); fence 194 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c __dma_fence_signal__timestamp(&rq->fence, timestamp); fence 195 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c __dma_fence_signal__notify(&rq->fence, &cb_list); fence 280 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c if (test_bit(I915_FENCE_FLAG_ACTIVE, &rq->fence.flags)) { fence 286 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c GEM_BUG_ON(test_bit(I915_FENCE_FLAG_SIGNAL, &rq->fence.flags)); fence 308 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c if (i915_seqno_passed(rq->fence.seqno, it->fence.seqno)) fence 316 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c set_bit(I915_FENCE_FLAG_SIGNAL, &rq->fence.flags); fence 337 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c if (test_bit(I915_FENCE_FLAG_SIGNAL, &rq->fence.flags)) { fence 344 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c clear_bit(I915_FENCE_FLAG_SIGNAL, &rq->fence.flags); fence 365 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c rq->fence.context, rq->fence.seqno, fence 1145 drivers/gpu/drm/i915/gt/intel_engine_cs.c const char *name = rq->fence.ops->get_timeline_name(&rq->fence); fence 1153 drivers/gpu/drm/i915/gt/intel_engine_cs.c rq->fence.context, rq->fence.seqno, fence 1158 drivers/gpu/drm/i915/gt/intel_engine_cs.c &rq->fence.flags) ? "+" : fence 1160 drivers/gpu/drm/i915/gt/intel_engine_cs.c &rq->fence.flags) ? 
"-" : fence 240 drivers/gpu/drm/i915/gt/intel_lrc.c dma_fence_set_error(&rq->fence, -EIO); fence 520 drivers/gpu/drm/i915/gt/intel_lrc.c &rq->fence.flags)) { fence 719 drivers/gpu/drm/i915/gt/intel_lrc.c ports[0]->fence.context, fence 720 drivers/gpu/drm/i915/gt/intel_lrc.c ports[0]->fence.seqno, fence 724 drivers/gpu/drm/i915/gt/intel_lrc.c ports[1] ? ports[1]->fence.context : 0, fence 725 drivers/gpu/drm/i915/gt/intel_lrc.c ports[1] ? ports[1]->fence.seqno : 0); fence 1102 drivers/gpu/drm/i915/gt/intel_lrc.c last->fence.context, fence 1103 drivers/gpu/drm/i915/gt/intel_lrc.c last->fence.seqno, fence 1129 drivers/gpu/drm/i915/gt/intel_lrc.c last->fence.context, fence 1130 drivers/gpu/drm/i915/gt/intel_lrc.c last->fence.seqno, fence 1201 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.context, fence 1202 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.seqno, fence 1892 drivers/gpu/drm/i915/gt/intel_lrc.c *cs++ = rq->fence.seqno - 1; fence 2945 drivers/gpu/drm/i915/gt/intel_lrc.c request->fence.seqno, fence 2955 drivers/gpu/drm/i915/gt/intel_lrc.c request->fence.seqno, fence 2974 drivers/gpu/drm/i915/gt/intel_lrc.c request->fence.seqno, fence 3544 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.context, rq->fence.seqno, fence 3638 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.context, fence 3639 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.seqno); fence 121 drivers/gpu/drm/i915/gt/intel_reset.c rq->fence.context, fence 122 drivers/gpu/drm/i915/gt/intel_reset.c rq->fence.seqno, fence 132 drivers/gpu/drm/i915/gt/intel_reset.c dma_fence_set_error(&rq->fence, -EAGAIN); fence 639 drivers/gpu/drm/i915/gt/intel_reset.c GEM_BUG_ON(vma->fence != >->ggtt->fence_regs[i]); fence 719 drivers/gpu/drm/i915/gt/intel_reset.c engine->name, request->fence.context, request->fence.seqno); fence 720 drivers/gpu/drm/i915/gt/intel_reset.c dma_fence_set_error(&request->fence, -EIO); fence 830 drivers/gpu/drm/i915/gt/intel_reset.c dma_fence_default_wait(&rq->fence, false, MAX_SCHEDULE_TIMEOUT); fence 326 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 429 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 447 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 467 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 472 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 920 drivers/gpu/drm/i915/gt/intel_ringbuffer.c dma_fence_set_error(&request->fence, -EIO); fence 947 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 972 drivers/gpu/drm/i915/gt/intel_ringbuffer.c *cs++ = rq->fence.seqno; fence 62 drivers/gpu/drm/i915/gt/intel_timeline.h const struct dma_fence *fence) fence 64 drivers/gpu/drm/i915/gt/intel_timeline.h return __intel_timeline_sync_set(tl, fence->context, fence->seqno); fence 74 drivers/gpu/drm/i915/gt/intel_timeline.h const struct dma_fence *fence) fence 76 drivers/gpu/drm/i915/gt/intel_timeline.h return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno); fence 228 drivers/gpu/drm/i915/gt/mock_engine.c dma_fence_set_error(&request->fence, -EIO); fence 111 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return hws->node.start + offset_in_page(sizeof(u32)*rq->fence.context); fence 194 drivers/gpu/drm/i915/gt/selftest_hangcheck.c *batch++ = rq->fence.seqno; fence 208 drivers/gpu/drm/i915/gt/selftest_hangcheck.c *batch++ = rq->fence.seqno; fence 221 drivers/gpu/drm/i915/gt/selftest_hangcheck.c *batch++ = rq->fence.seqno; fence 233 
fence 272 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return READ_ONCE(h->seqno[rq->fence.context % (PAGE_SIZE/sizeof(u32))]);
fence 294 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.seqno),
fence 297 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.seqno),
fence 612 drivers/gpu/drm/i915/gt/selftest_hangcheck.c __func__, rq->fence.seqno, hws_seqno(&h, rq));
fence 699 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.context,
fence 700 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.seqno);
fence 874 drivers/gpu/drm/i915/gt/selftest_hangcheck.c __func__, rq->fence.seqno, hws_seqno(&h, rq));
fence 1082 drivers/gpu/drm/i915/gt/selftest_hangcheck.c __func__, rq->fence.seqno, hws_seqno(&h, rq));
fence 1271 drivers/gpu/drm/i915/gt/selftest_hangcheck.c __func__, rq->fence.seqno, hws_seqno(&h, rq));
fence 1290 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (wait_for(!list_empty(&rq->fence.cb_list), 10)) {
fence 1469 drivers/gpu/drm/i915/gt/selftest_hangcheck.c prev->fence.seqno, hws_seqno(&h, prev));
fence 1484 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (prev->fence.error != -EIO) {
fence 1486 drivers/gpu/drm/i915/gt/selftest_hangcheck.c prev->fence.error);
fence 1493 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (rq->fence.error) {
fence 1495 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.error);
fence 1575 drivers/gpu/drm/i915/gt/selftest_hangcheck.c __func__, rq->fence.seqno, hws_seqno(&h, rq));
fence 1595 drivers/gpu/drm/i915/gt/selftest_hangcheck.c if (rq->fence.error != -EIO) {
fence 1664 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.seqno, hws_seqno(&h, rq));
fence 974 drivers/gpu/drm/i915/gt/selftest_lrc.c dummy_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state)
fence 993 drivers/gpu/drm/i915/gt/selftest_lrc.c rq->fence.seqno = 1;
fence 994 drivers/gpu/drm/i915/gt/selftest_lrc.c BUILD_BUG_ON(sizeof(rq->fence.seqno) != 8); /* upper 32b == 0 */
fence 995 drivers/gpu/drm/i915/gt/selftest_lrc.c rq->hwsp_seqno = (u32 *)&rq->fence.seqno + 1;
fence 999 drivers/gpu/drm/i915/gt/selftest_lrc.c set_bit(I915_FENCE_FLAG_ACTIVE, &rq->fence.flags);
fence 1002 drivers/gpu/drm/i915/gt/selftest_lrc.c rq->fence.lock = &rq->lock;
fence 1003 drivers/gpu/drm/i915/gt/selftest_lrc.c INIT_LIST_HEAD(&rq->fence.cb_list);
fence 1014 drivers/gpu/drm/i915/gt/selftest_lrc.c dma_fence_signal(&dummy->fence);
fence 1019 drivers/gpu/drm/i915/gt/selftest_lrc.c dma_fence_free(&dummy->fence);
fence 1754 drivers/gpu/drm/i915/gt/selftest_lrc.c request[nc]->fence.context,
fence 1755 drivers/gpu/drm/i915/gt/selftest_lrc.c request[nc]->fence.seqno);
fence 1759 drivers/gpu/drm/i915/gt/selftest_lrc.c request[nc]->fence.context,
fence 1760 drivers/gpu/drm/i915/gt/selftest_lrc.c request[nc]->fence.seqno);
fence 1902 drivers/gpu/drm/i915/gt/selftest_lrc.c request[n]->fence.context,
fence 1903 drivers/gpu/drm/i915/gt/selftest_lrc.c request[n]->fence.seqno);
fence 1907 drivers/gpu/drm/i915/gt/selftest_lrc.c request[n]->fence.context,
fence 1908 drivers/gpu/drm/i915/gt/selftest_lrc.c request[n]->fence.seqno);
fence 2003 drivers/gpu/drm/i915/gt/selftest_lrc.c struct i915_sw_fence fence = {};
fence 2018 drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_init(&fence);
fence 2020 drivers/gpu/drm/i915/gt/selftest_lrc.c &fence,
fence 2035 drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_fini(&fence);
fence 2044 drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_fini(&fence);
fence 2051
drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_fini(&fence); fence 2059 drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_fini(&fence); fence 2065 drivers/gpu/drm/i915/gt/selftest_lrc.c &rq[0]->fence, fence 2069 drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_fini(&fence); fence 2073 drivers/gpu/drm/i915/gt/selftest_lrc.c onstack_fence_fini(&fence); fence 17 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 fence; fence 294 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 fence, fence 336 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c 4, &header, 4, &fence, fence 342 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c cmds[tail] = fence; fence 374 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 fence, fence 384 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c #define done (READ_ONCE(desc->fence) == fence) fence 392 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c fence, desc->fence); fence 437 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c DRM_ERROR("CT: fence %u err %d\n", req->fence, err); fence 455 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 fence; fence 463 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c fence = ctch_get_next_fence(ctch); fence 464 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c request.fence = fence; fence 473 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c err = ctb_write(ctb, action, len, fence, !!response_buf); fence 482 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c err = wait_for_ctb_desc_update(desc, fence, status); fence 629 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 fence; fence 644 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c fence = msg[1]; fence 654 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c CT_DEBUG_DRIVER("CT: response fence %u status %#x\n", fence, status); fence 658 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c if (unlikely(fence != req->fence)) { fence 660 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c req->fence); fence 665 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c req->fence, 4 * msglen, msg); fence 254 drivers/gpu/drm/i915/gt/uc/intel_guc_fwif.h u32 fence; /* fence updated by GuC */ fence 471 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c ring_tail, rq->fence.seqno); fence 710 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c dma_fence_set_error(&rq->fence, -EIO); fence 723 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c dma_fence_set_error(&rq->fence, -EIO); fence 128 drivers/gpu/drm/i915/gvt/aperture_gm.c u32 fence, u64 value) fence 137 drivers/gpu/drm/i915/gvt/aperture_gm.c if (WARN_ON(fence >= vgpu_fence_sz(vgpu))) fence 140 drivers/gpu/drm/i915/gvt/aperture_gm.c reg = vgpu->fence.regs[fence]; fence 178 drivers/gpu/drm/i915/gvt/aperture_gm.c reg = vgpu->fence.regs[i]; fence 180 drivers/gpu/drm/i915/gvt/aperture_gm.c vgpu->fence.regs[i] = NULL; fence 205 drivers/gpu/drm/i915/gvt/aperture_gm.c vgpu->fence.regs[i] = reg; fence 217 drivers/gpu/drm/i915/gvt/aperture_gm.c reg = vgpu->fence.regs[i]; fence 221 drivers/gpu/drm/i915/gvt/aperture_gm.c vgpu->fence.regs[i] = NULL; fence 234 drivers/gpu/drm/i915/gvt/aperture_gm.c gvt->fence.vgpu_allocated_fence_num -= vgpu_fence_sz(vgpu); fence 273 drivers/gpu/drm/i915/gvt/aperture_gm.c taken = gvt->fence.vgpu_allocated_fence_num; fence 284 drivers/gpu/drm/i915/gvt/aperture_gm.c gvt->fence.vgpu_allocated_fence_num += param->fence_sz; fence 104 drivers/gpu/drm/i915/gvt/gvt.c type->fence, vgpu_edid_str(type->resolution), fence 185 drivers/gpu/drm/i915/gvt/gvt.h struct intel_vgpu_fence fence; fence 296 drivers/gpu/drm/i915/gvt/gvt.h unsigned int fence; fence 314 drivers/gpu/drm/i915/gvt/gvt.h struct intel_gvt_fence fence; fence 421 drivers/gpu/drm/i915/gvt/gvt.h 
#define vgpu_fence_base(vgpu) (vgpu->fence.base) fence 422 drivers/gpu/drm/i915/gvt/gvt.h #define vgpu_fence_sz(vgpu) (vgpu->fence.size) fence 441 drivers/gpu/drm/i915/gvt/gvt.h u32 fence, u64 value); fence 931 drivers/gpu/drm/i915/gvt/scheduler.c if (workload->req->fence.error == -EIO) fence 82 drivers/gpu/drm/i915/gvt/vgpu.c unsigned int fence; fence 140 drivers/gpu/drm/i915/gvt/vgpu.c gvt->types[i].fence = vgpu_types[i].fence; fence 162 drivers/gpu/drm/i915/gvt/vgpu.c gvt->types[i].high_gm_size, gvt->types[i].fence, fence 190 drivers/gpu/drm/i915/gvt/vgpu.c gvt->fence.vgpu_allocated_fence_num; fence 195 drivers/gpu/drm/i915/gvt/vgpu.c fence_min = fence_avail / gvt->types[i].fence; fence 202 drivers/gpu/drm/i915/gvt/vgpu.c gvt->types[i].high_gm_size, gvt->types[i].fence); fence 486 drivers/gpu/drm/i915/gvt/vgpu.c param.fence_sz = type->fence; fence 457 drivers/gpu/drm/i915/i915_active.c return barrier ? i915_request_await_dma_fence(rq, &barrier->fence) : 0; fence 213 drivers/gpu/drm/i915/i915_debugfs.c if (vma->fence) fence 214 drivers/gpu/drm/i915/i915_debugfs.c seq_printf(m, " , fence: %d", vma->fence->id); fence 268 drivers/gpu/drm/i915/i915_gem.c struct dma_fence *fence; fence 277 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj); fence 279 drivers/gpu/drm/i915/i915_gem.c if (!fence) fence 299 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence); fence 335 drivers/gpu/drm/i915/i915_gem.c struct dma_fence *fence; fence 374 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj); fence 376 drivers/gpu/drm/i915/i915_gem.c if (!fence) { fence 415 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence); fence 528 drivers/gpu/drm/i915/i915_gem.c struct dma_fence *fence; fence 584 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj); fence 586 drivers/gpu/drm/i915/i915_gem.c if (!fence) { fence 635 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence); fence 686 drivers/gpu/drm/i915/i915_gem.c struct dma_fence *fence; fence 695 drivers/gpu/drm/i915/i915_gem.c fence = i915_gem_object_lock_fence(obj); fence 697 drivers/gpu/drm/i915/i915_gem.c if (!fence) fence 727 drivers/gpu/drm/i915/i915_gem.c i915_gem_object_unlock_fence(obj, fence); fence 1047 drivers/gpu/drm/i915/i915_gem.c if (vma->fence && !i915_gem_object_is_tiled(obj)) { fence 62 drivers/gpu/drm/i915/i915_gem_fence_reg.c static void i965_write_fence_reg(struct i915_fence_reg *fence, fence 69 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (INTEL_GEN(fence->i915) >= 6) { fence 70 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence_reg_lo = FENCE_REG_GEN6_LO(fence->id); fence 71 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence_reg_hi = FENCE_REG_GEN6_HI(fence->id); fence 75 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence_reg_lo = FENCE_REG_965_LO(fence->id); fence 76 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence_reg_hi = FENCE_REG_965_HI(fence->id); fence 98 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct intel_uncore *uncore = &fence->i915->uncore; fence 119 drivers/gpu/drm/i915/i915_gem_fence_reg.c static void i915_write_fence_reg(struct i915_fence_reg *fence, fence 135 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (is_y_tiled && HAS_128_BYTE_Y_TILING(fence->i915)) fence 151 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct intel_uncore *uncore = &fence->i915->uncore; fence 152 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_reg_t reg = FENCE_REG(fence->id); fence 159 drivers/gpu/drm/i915/i915_gem_fence_reg.c static void 
i830_write_fence_reg(struct i915_fence_reg *fence, fence 183 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct intel_uncore *uncore = &fence->i915->uncore; fence 184 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_reg_t reg = FENCE_REG(fence->id); fence 191 drivers/gpu/drm/i915/i915_gem_fence_reg.c static void fence_write(struct i915_fence_reg *fence, fence 200 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (IS_GEN(fence->i915, 2)) fence 201 drivers/gpu/drm/i915/i915_gem_fence_reg.c i830_write_fence_reg(fence, vma); fence 202 drivers/gpu/drm/i915/i915_gem_fence_reg.c else if (IS_GEN(fence->i915, 3)) fence 203 drivers/gpu/drm/i915/i915_gem_fence_reg.c i915_write_fence_reg(fence, vma); fence 205 drivers/gpu/drm/i915/i915_gem_fence_reg.c i965_write_fence_reg(fence, vma); fence 212 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence->dirty = false; fence 215 drivers/gpu/drm/i915/i915_gem_fence_reg.c static int fence_update(struct i915_fence_reg *fence, fence 238 drivers/gpu/drm/i915/i915_gem_fence_reg.c old = xchg(&fence->vma, NULL); fence 242 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence->vma = old; fence 253 drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(old->fence != fence); fence 255 drivers/gpu/drm/i915/i915_gem_fence_reg.c old->fence = NULL; fence 258 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_move(&fence->link, &fence->i915->ggtt.fence_list); fence 271 drivers/gpu/drm/i915/i915_gem_fence_reg.c wakeref = intel_runtime_pm_get_if_in_use(&fence->i915->runtime_pm); fence 277 drivers/gpu/drm/i915/i915_gem_fence_reg.c WRITE_ONCE(fence->vma, vma); fence 278 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence_write(fence, vma); fence 281 drivers/gpu/drm/i915/i915_gem_fence_reg.c vma->fence = fence; fence 282 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_move_tail(&fence->link, &fence->i915->ggtt.fence_list); fence 285 drivers/gpu/drm/i915/i915_gem_fence_reg.c intel_runtime_pm_put(&fence->i915->runtime_pm, wakeref); fence 302 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_fence_reg *fence = vma->fence; fence 305 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (!fence) fence 308 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (atomic_read(&fence->pin_count)) fence 311 drivers/gpu/drm/i915/i915_gem_fence_reg.c return fence_update(fence, NULL); fence 316 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_fence_reg *fence; fence 318 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_for_each_entry(fence, &i915->ggtt.fence_list, link) { fence 319 drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(fence->vma && fence->vma->fence != fence); fence 321 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (atomic_read(&fence->pin_count)) fence 324 drivers/gpu/drm/i915/i915_gem_fence_reg.c return fence; fence 337 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_fence_reg *fence; fence 342 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (vma->fence) { fence 343 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence = vma->fence; fence 344 drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(fence->vma != vma); fence 345 drivers/gpu/drm/i915/i915_gem_fence_reg.c atomic_inc(&fence->pin_count); fence 346 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (!fence->dirty) { fence 347 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_move_tail(&fence->link, &ggtt->fence_list); fence 351 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence = fence_find(vma->vm->i915); fence 352 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (IS_ERR(fence)) fence 353 drivers/gpu/drm/i915/i915_gem_fence_reg.c return PTR_ERR(fence); fence 355 
drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(atomic_read(&fence->pin_count)); fence 356 drivers/gpu/drm/i915/i915_gem_fence_reg.c atomic_inc(&fence->pin_count); fence 361 drivers/gpu/drm/i915/i915_gem_fence_reg.c err = fence_update(fence, set); fence 365 drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(fence->vma != set); fence 366 drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(vma->fence != (set ? fence : NULL)); fence 372 drivers/gpu/drm/i915/i915_gem_fence_reg.c atomic_dec(&fence->pin_count); fence 426 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_fence_reg *fence; fence 434 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_for_each_entry(fence, &ggtt->fence_list, link) fence 435 drivers/gpu/drm/i915/i915_gem_fence_reg.c count += !atomic_read(&fence->pin_count); fence 439 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence = fence_find(i915); fence 440 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (IS_ERR(fence)) fence 441 drivers/gpu/drm/i915/i915_gem_fence_reg.c return fence; fence 443 drivers/gpu/drm/i915/i915_gem_fence_reg.c if (fence->vma) { fence 445 drivers/gpu/drm/i915/i915_gem_fence_reg.c ret = fence_update(fence, NULL); fence 450 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_del(&fence->link); fence 452 drivers/gpu/drm/i915/i915_gem_fence_reg.c return fence; fence 461 drivers/gpu/drm/i915/i915_gem_fence_reg.c void i915_unreserve_fence(struct i915_fence_reg *fence) fence 463 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_ggtt *ggtt = &fence->i915->ggtt; fence 467 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_add(&fence->link, &ggtt->fence_list); fence 487 drivers/gpu/drm/i915/i915_gem_fence_reg.c GEM_BUG_ON(vma && vma->fence != reg); fence 847 drivers/gpu/drm/i915/i915_gem_fence_reg.c struct i915_fence_reg *fence = &ggtt->fence_regs[i]; fence 849 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence->i915 = i915; fence 850 drivers/gpu/drm/i915/i915_gem_fence_reg.c fence->id = i; fence 851 drivers/gpu/drm/i915/i915_gem_fence_reg.c list_add_tail(&fence->link, &ggtt->fence_list); fence 59 drivers/gpu/drm/i915/i915_gem_fence_reg.h void i915_unreserve_fence(struct i915_fence_reg *fence); fence 723 drivers/gpu/drm/i915/i915_gpu_error.c err_printf(m, " fence[%d] = %08llx\n", i, error->fence[i]); fence 1046 drivers/gpu/drm/i915/i915_gpu_error.c error->fence[i] = fence 1051 drivers/gpu/drm/i915/i915_gpu_error.c error->fence[i] = fence 1056 drivers/gpu/drm/i915/i915_gpu_error.c error->fence[i] = fence 1174 drivers/gpu/drm/i915/i915_gpu_error.c erq->flags = request->fence.flags; fence 1175 drivers/gpu/drm/i915/i915_gpu_error.c erq->context = request->fence.context; fence 1176 drivers/gpu/drm/i915/i915_gpu_error.c erq->seqno = request->fence.seqno; fence 79 drivers/gpu/drm/i915/i915_gpu_error.h u64 fence[I915_MAX_NUM_FENCES]; fence 44 drivers/gpu/drm/i915/i915_request.c struct i915_sw_fence *fence; fence 56 drivers/gpu/drm/i915/i915_request.c static const char *i915_fence_get_driver_name(struct dma_fence *fence) fence 61 drivers/gpu/drm/i915/i915_request.c static const char *i915_fence_get_timeline_name(struct dma_fence *fence) fence 72 drivers/gpu/drm/i915/i915_request.c if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) fence 75 drivers/gpu/drm/i915/i915_request.c return to_request(fence)->gem_context->name ?: "[i915]"; fence 78 drivers/gpu/drm/i915/i915_request.c static bool i915_fence_signaled(struct dma_fence *fence) fence 80 drivers/gpu/drm/i915/i915_request.c return i915_request_completed(to_request(fence)); fence 83 drivers/gpu/drm/i915/i915_request.c 
static bool i915_fence_enable_signaling(struct dma_fence *fence)
fence 85 drivers/gpu/drm/i915/i915_request.c return i915_request_enable_breadcrumb(to_request(fence));
fence 88 drivers/gpu/drm/i915/i915_request.c static signed long i915_fence_wait(struct dma_fence *fence,
fence 92 drivers/gpu/drm/i915/i915_request.c return i915_request_wait(to_request(fence),
fence 97 drivers/gpu/drm/i915/i915_request.c static void i915_fence_release(struct dma_fence *fence)
fence 99 drivers/gpu/drm/i915/i915_request.c struct i915_request *rq = to_request(fence);
fence 127 drivers/gpu/drm/i915/i915_request.c i915_sw_fence_complete(cb->fence);
fence 135 drivers/gpu/drm/i915/i915_request.c cb->hook(container_of(cb->fence, struct i915_request, submit),
fence 136 drivers/gpu/drm/i915/i915_request.c &cb->signal->fence);
fence 228 drivers/gpu/drm/i915/i915_request.c rq->fence.context, rq->fence.seqno,
fence 288 drivers/gpu/drm/i915/i915_request.c dma_fence_signal_locked(&rq->fence);
fence 289 drivers/gpu/drm/i915/i915_request.c if (test_bit(DMA_FENCE_FLAG_ENABLE_SIGNAL_BIT, &rq->fence.flags))
fence 295 drivers/gpu/drm/i915/i915_request.c if (!test_bit(I915_FENCE_FLAG_ACTIVE, &rq->fence.flags)) {
fence 296 drivers/gpu/drm/i915/i915_request.c set_bit(I915_FENCE_FLAG_ACTIVE, &rq->fence.flags);
fence 324 drivers/gpu/drm/i915/i915_request.c rq->fence.context, rq->fence.seqno,
fence 346 drivers/gpu/drm/i915/i915_request.c hook(rq, &signal->fence);
fence 354 drivers/gpu/drm/i915/i915_request.c cb->fence = &rq->submit;
fence 355 drivers/gpu/drm/i915/i915_request.c i915_sw_fence_await(cb->fence);
fence 367 drivers/gpu/drm/i915/i915_request.c hook(rq, &signal->fence);
fence 370 drivers/gpu/drm/i915/i915_request.c i915_sw_fence_complete(cb->fence);
fence 387 drivers/gpu/drm/i915/i915_request.c request->fence.context, request->fence.seqno,
fence 445 drivers/gpu/drm/i915/i915_request.c if (!test_and_set_bit(I915_FENCE_FLAG_ACTIVE, &request->fence.flags))
fence 448 drivers/gpu/drm/i915/i915_request.c if (test_bit(DMA_FENCE_FLAG_ENABLE_SIGNAL_BIT, &request->fence.flags) &&
fence 449 drivers/gpu/drm/i915/i915_request.c !test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &request->fence.flags) &&
fence 479 drivers/gpu/drm/i915/i915_request.c request->fence.context, request->fence.seqno,
fence 493 drivers/gpu/drm/i915/i915_request.c if (test_bit(DMA_FENCE_FLAG_ENABLE_SIGNAL_BIT, &request->fence.flags))
fence 496 drivers/gpu/drm/i915/i915_request.c GEM_BUG_ON(!test_bit(I915_FENCE_FLAG_ACTIVE, &request->fence.flags));
fence 497 drivers/gpu/drm/i915/i915_request.c clear_bit(I915_FENCE_FLAG_ACTIVE, &request->fence.flags);
fence 530 drivers/gpu/drm/i915/i915_request.c submit_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state)
fence 533 drivers/gpu/drm/i915/i915_request.c container_of(fence, typeof(*request), submit);
fence 539 drivers/gpu/drm/i915/i915_request.c if (unlikely(fence->error))
fence 540 drivers/gpu/drm/i915/i915_request.c i915_request_skip(request, fence->error);
fence 573 drivers/gpu/drm/i915/i915_request.c semaphore_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state)
fence 575 drivers/gpu/drm/i915/i915_request.c struct i915_request *rq = container_of(fence, typeof(*rq), semaphore);
fence 701 drivers/gpu/drm/i915/i915_request.c dma_fence_init(&rq->fence, &i915_fence_ops, &rq->lock,
fence 805 drivers/gpu/drm/i915/i915_request.c if (intel_timeline_sync_is_later(rq->timeline, &signal->fence))
fence 809 drivers/gpu/drm/i915/i915_request.c &signal->fence, 0,
fence 846 drivers/gpu/drm/i915/i915_request.c &from->fence, 0,
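The i915_request.c hits around fence 935-1028 outline i915_request_await_dma_fence(): a composite dma_fence_array is unrolled into its children, already-signaled or same-context children are skipped, and each remaining child takes either the native request-to-request path or a generic software-fence wait. A condensed sketch follows, with the timeline dedup step and error unwinding elided; example_await_dma_fence() is a hypothetical wrapper.

/*
 * Condensed await loop: unroll composite fences, skip no-ops, then
 * route native i915 fences through the cheap request path and foreign
 * fences through a dma-fence callback on the submit software fence.
 */
static int example_await_dma_fence(struct i915_request *rq,
				   struct dma_fence *fence)
{
	struct dma_fence **child = &fence;
	unsigned int nchild = 1;
	int ret;

	if (dma_fence_is_array(fence)) {
		struct dma_fence_array *array = to_dma_fence_array(fence);

		child = array->fences;		/* unroll the composite */
		nchild = array->num_fences;
	}

	do {
		fence = *child++;
		if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
			continue;		/* already signaled */

		if (fence->context == rq->fence.context)
			continue;		/* same timeline orders itself */

		if (dma_fence_is_i915(fence))
			ret = i915_request_await_request(rq, to_request(fence));
		else
			ret = i915_sw_fence_await_dma_fence(&rq->submit, fence,
							    0, GFP_KERNEL);
		if (ret < 0)
			return ret;
	} while (--nchild);

	return 0;
}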
fence 879 drivers/gpu/drm/i915/i915_request.c *cs++ = from->fence.seqno;
fence 898 drivers/gpu/drm/i915/i915_request.c i915_sw_fence_set_error_once(&to->submit, from->fence.error);
fence 917 drivers/gpu/drm/i915/i915_request.c &from->fence, 0,
fence 925 drivers/gpu/drm/i915/i915_request.c &from->fence, 0,
fence 935 drivers/gpu/drm/i915/i915_request.c i915_request_await_dma_fence(struct i915_request *rq, struct dma_fence *fence)
fence 937 drivers/gpu/drm/i915/i915_request.c struct dma_fence **child = &fence;
fence 949 drivers/gpu/drm/i915/i915_request.c if (dma_fence_is_array(fence)) {
fence 950 drivers/gpu/drm/i915/i915_request.c struct dma_fence_array *array = to_dma_fence_array(fence);
fence 958 drivers/gpu/drm/i915/i915_request.c fence = *child++;
fence 959 drivers/gpu/drm/i915/i915_request.c if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
fence 967 drivers/gpu/drm/i915/i915_request.c if (fence->context == rq->fence.context)
fence 971 drivers/gpu/drm/i915/i915_request.c if (fence->context &&
fence 972 drivers/gpu/drm/i915/i915_request.c intel_timeline_sync_is_later(rq->timeline, fence))
fence 975 drivers/gpu/drm/i915/i915_request.c if (dma_fence_is_i915(fence))
fence 976 drivers/gpu/drm/i915/i915_request.c ret = i915_request_await_request(rq, to_request(fence));
fence 978 drivers/gpu/drm/i915/i915_request.c ret = i915_sw_fence_await_dma_fence(&rq->submit, fence,
fence 985 drivers/gpu/drm/i915/i915_request.c if (fence->context)
fence 986 drivers/gpu/drm/i915/i915_request.c intel_timeline_sync_set(rq->timeline, fence);
fence 994 drivers/gpu/drm/i915/i915_request.c struct dma_fence *fence,
fence 998 drivers/gpu/drm/i915/i915_request.c struct dma_fence **child = &fence;
fence 1002 drivers/gpu/drm/i915/i915_request.c if (dma_fence_is_array(fence)) {
fence 1003 drivers/gpu/drm/i915/i915_request.c struct dma_fence_array *array = to_dma_fence_array(fence);
fence 1013 drivers/gpu/drm/i915/i915_request.c fence = *child++;
fence 1014 drivers/gpu/drm/i915/i915_request.c if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
fence 1022 drivers/gpu/drm/i915/i915_request.c if (dma_fence_is_i915(fence))
fence 1024 drivers/gpu/drm/i915/i915_request.c to_request(fence),
fence 1028 drivers/gpu/drm/i915/i915_request.c ret = i915_sw_fence_await_dma_fence(&rq->submit, fence,
fence 1106 drivers/gpu/drm/i915/i915_request.c dma_fence_set_error(&rq->fence, error);
fence 1160 drivers/gpu/drm/i915/i915_request.c &prev->fence,
fence 1176 drivers/gpu/drm/i915/i915_request.c GEM_BUG_ON(timeline->seqno != rq->fence.seqno);
fence 1194 drivers/gpu/drm/i915/i915_request.c engine->name, rq->fence.context, rq->fence.seqno);
fence 1387 drivers/gpu/drm/i915/i915_request.c static void request_wait_wake(struct dma_fence *fence, struct dma_fence_cb *cb)
fence 1420 drivers/gpu/drm/i915/i915_request.c if (dma_fence_is_signaled(&rq->fence))
fence 1461 drivers/gpu/drm/i915/i915_request.c dma_fence_signal(&rq->fence);
fence 1484 drivers/gpu/drm/i915/i915_request.c if (dma_fence_add_callback(&rq->fence, &wait.cb, request_wait_wake))
fence 1491 drivers/gpu/drm/i915/i915_request.c dma_fence_signal(&rq->fence);
fence 1509 drivers/gpu/drm/i915/i915_request.c dma_fence_remove_callback(&rq->fence, &wait.cb);
fence 97 drivers/gpu/drm/i915/i915_request.h struct dma_fence fence;
fence 242 drivers/gpu/drm/i915/i915_request.h static inline bool dma_fence_is_i915(const struct dma_fence *fence)
fence 244 drivers/gpu/drm/i915/i915_request.h return fence->ops == &i915_fence_ops;
fence 259
drivers/gpu/drm/i915/i915_request.h to_request(struct dma_fence *fence) fence 262 drivers/gpu/drm/i915/i915_request.h BUILD_BUG_ON(offsetof(struct i915_request, fence) != 0); fence 263 drivers/gpu/drm/i915/i915_request.h GEM_BUG_ON(fence && !dma_fence_is_i915(fence)); fence 264 drivers/gpu/drm/i915/i915_request.h return container_of(fence, struct i915_request, fence); fence 270 drivers/gpu/drm/i915/i915_request.h return to_request(dma_fence_get(&rq->fence)); fence 276 drivers/gpu/drm/i915/i915_request.h return to_request(dma_fence_get_rcu(&rq->fence)); fence 282 drivers/gpu/drm/i915/i915_request.h dma_fence_put(&rq->fence); fence 289 drivers/gpu/drm/i915/i915_request.h struct dma_fence *fence); fence 291 drivers/gpu/drm/i915/i915_request.h struct dma_fence *fence, fence 322 drivers/gpu/drm/i915/i915_request.h return test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &rq->fence.flags); fence 327 drivers/gpu/drm/i915/i915_request.h return test_bit(I915_FENCE_FLAG_ACTIVE, &rq->fence.flags); fence 369 drivers/gpu/drm/i915/i915_request.h return i915_seqno_passed(hwsp_seqno(rq), rq->fence.seqno - 1); fence 428 drivers/gpu/drm/i915/i915_request.h return i915_seqno_passed(hwsp_seqno(rq), rq->fence.seqno); fence 433 drivers/gpu/drm/i915/i915_request.h rq->hwsp_seqno = (u32 *)&rq->fence.seqno; /* decouple from HWSP */ fence 36 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_init(struct i915_sw_fence *fence) fence 38 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_init(fence, &i915_sw_fence_debug_descr); fence 41 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_init_onstack(struct i915_sw_fence *fence) fence 43 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_init_on_stack(fence, &i915_sw_fence_debug_descr); fence 46 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_activate(struct i915_sw_fence *fence) fence 48 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_activate(fence, &i915_sw_fence_debug_descr); fence 51 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_set_state(struct i915_sw_fence *fence, fence 54 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_active_state(fence, &i915_sw_fence_debug_descr, old, new); fence 57 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_deactivate(struct i915_sw_fence *fence) fence 59 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_deactivate(fence, &i915_sw_fence_debug_descr); fence 62 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_destroy(struct i915_sw_fence *fence) fence 64 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_destroy(fence, &i915_sw_fence_debug_descr); fence 67 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_free(struct i915_sw_fence *fence) fence 69 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_free(fence, &i915_sw_fence_debug_descr); fence 73 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_assert(struct i915_sw_fence *fence) fence 75 drivers/gpu/drm/i915/i915_sw_fence.c debug_object_assert_init(fence, &i915_sw_fence_debug_descr); fence 80 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_init(struct i915_sw_fence *fence) fence 84 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_init_onstack(struct i915_sw_fence *fence) fence 88 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_activate(struct i915_sw_fence *fence) fence 92 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_set_state(struct i915_sw_fence *fence, fence 97 
drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_deactivate(struct i915_sw_fence *fence) fence 101 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_destroy(struct i915_sw_fence *fence) fence 105 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_free(struct i915_sw_fence *fence) fence 109 drivers/gpu/drm/i915/i915_sw_fence.c static inline void debug_fence_assert(struct i915_sw_fence *fence) fence 115 drivers/gpu/drm/i915/i915_sw_fence.c static int __i915_sw_fence_notify(struct i915_sw_fence *fence, fence 120 drivers/gpu/drm/i915/i915_sw_fence.c fn = (i915_sw_fence_notify_t)(fence->flags & I915_SW_FENCE_MASK); fence 121 drivers/gpu/drm/i915/i915_sw_fence.c return fn(fence, state); fence 125 drivers/gpu/drm/i915/i915_sw_fence.c void i915_sw_fence_fini(struct i915_sw_fence *fence) fence 127 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_free(fence); fence 131 drivers/gpu/drm/i915/i915_sw_fence.c static void __i915_sw_fence_wake_up_all(struct i915_sw_fence *fence, fence 134 drivers/gpu/drm/i915/i915_sw_fence.c wait_queue_head_t *x = &fence->wait; fence 138 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_deactivate(fence); fence 139 drivers/gpu/drm/i915/i915_sw_fence.c atomic_set_release(&fence->pending, -1); /* 0 -> -1 [done] */ fence 162 drivers/gpu/drm/i915/i915_sw_fence.c TASK_NORMAL, fence->error, fence 174 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 177 drivers/gpu/drm/i915/i915_sw_fence.c static void __i915_sw_fence_complete(struct i915_sw_fence *fence, fence 180 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 182 drivers/gpu/drm/i915/i915_sw_fence.c if (!atomic_dec_and_test(&fence->pending)) fence 185 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_set_state(fence, DEBUG_FENCE_IDLE, DEBUG_FENCE_NOTIFY); fence 187 drivers/gpu/drm/i915/i915_sw_fence.c if (__i915_sw_fence_notify(fence, FENCE_COMPLETE) != NOTIFY_DONE) fence 190 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_set_state(fence, DEBUG_FENCE_NOTIFY, DEBUG_FENCE_IDLE); fence 192 drivers/gpu/drm/i915/i915_sw_fence.c __i915_sw_fence_wake_up_all(fence, continuation); fence 194 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_destroy(fence); fence 195 drivers/gpu/drm/i915/i915_sw_fence.c __i915_sw_fence_notify(fence, FENCE_FREE); fence 198 drivers/gpu/drm/i915/i915_sw_fence.c void i915_sw_fence_complete(struct i915_sw_fence *fence) fence 200 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 202 drivers/gpu/drm/i915/i915_sw_fence.c if (WARN_ON(i915_sw_fence_done(fence))) fence 205 drivers/gpu/drm/i915/i915_sw_fence.c __i915_sw_fence_complete(fence, NULL); fence 208 drivers/gpu/drm/i915/i915_sw_fence.c void i915_sw_fence_await(struct i915_sw_fence *fence) fence 210 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 211 drivers/gpu/drm/i915/i915_sw_fence.c WARN_ON(atomic_inc_return(&fence->pending) <= 1); fence 214 drivers/gpu/drm/i915/i915_sw_fence.c void __i915_sw_fence_init(struct i915_sw_fence *fence, fence 221 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_init(fence); fence 223 drivers/gpu/drm/i915/i915_sw_fence.c __init_waitqueue_head(&fence->wait, name, key); fence 224 drivers/gpu/drm/i915/i915_sw_fence.c atomic_set(&fence->pending, 1); fence 225 drivers/gpu/drm/i915/i915_sw_fence.c fence->error = 0; fence 227 drivers/gpu/drm/i915/i915_sw_fence.c fence->flags = (unsigned long)fn; fence 230 drivers/gpu/drm/i915/i915_sw_fence.c void i915_sw_fence_commit(struct i915_sw_fence *fence) 
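
The i915_sw_fence entries above encode the whole fence lifecycle in one atomic counter: __i915_sw_fence_init() starts pending at 1 (the commit reference), i915_sw_fence_await() adds 1 per dependency, each completion subtracts 1, and the 0 -> -1 transition marks the fence done once waiters have been woken. A compilable userspace approximation of that protocol (notification callbacks and waitqueues omitted):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct sw_fence { atomic_int pending; };

static void sw_fence_init(struct sw_fence *f)
{
	atomic_init(&f->pending, 1);        /* 1 = the commit reference */
}

static void sw_fence_await(struct sw_fence *f)
{
	atomic_fetch_add(&f->pending, 1);   /* one more dependency */
}

static void sw_fence_complete(struct sw_fence *f)
{
	/* The last completion takes 1 -> 0; waiters would be woken here,
	 * then the count is parked at -1 to mean "done". */
	if (atomic_fetch_sub(&f->pending, 1) == 1)
		atomic_store(&f->pending, -1);
}

static bool sw_fence_done(struct sw_fence *f)
{
	return atomic_load(&f->pending) < 0;
}

int main(void)
{
	struct sw_fence f;

	sw_fence_init(&f);
	sw_fence_await(&f);        /* pending: 2 */
	sw_fence_complete(&f);     /* dependency done, pending: 1 */
	sw_fence_complete(&f);     /* commit reference, 1 -> 0 -> -1 */
	printf("done: %d\n", sw_fence_done(&f));   /* done: 1 */
	return 0;
}
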
fence 232 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_activate(fence); fence 233 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_complete(fence); fence 248 drivers/gpu/drm/i915/i915_sw_fence.c static bool __i915_sw_fence_check_if_after(struct i915_sw_fence *fence, fence 253 drivers/gpu/drm/i915/i915_sw_fence.c if (__test_and_set_bit(I915_SW_FENCE_CHECKED_BIT, &fence->flags)) fence 256 drivers/gpu/drm/i915/i915_sw_fence.c if (fence == signaler) fence 259 drivers/gpu/drm/i915/i915_sw_fence.c list_for_each_entry(wq, &fence->wait.head, entry) { fence 270 drivers/gpu/drm/i915/i915_sw_fence.c static void __i915_sw_fence_clear_checked_bit(struct i915_sw_fence *fence) fence 274 drivers/gpu/drm/i915/i915_sw_fence.c if (!__test_and_clear_bit(I915_SW_FENCE_CHECKED_BIT, &fence->flags)) fence 277 drivers/gpu/drm/i915/i915_sw_fence.c list_for_each_entry(wq, &fence->wait.head, entry) { fence 285 drivers/gpu/drm/i915/i915_sw_fence.c static bool i915_sw_fence_check_if_after(struct i915_sw_fence *fence, fence 295 drivers/gpu/drm/i915/i915_sw_fence.c err = __i915_sw_fence_check_if_after(fence, signaler); fence 296 drivers/gpu/drm/i915/i915_sw_fence.c __i915_sw_fence_clear_checked_bit(fence); fence 302 drivers/gpu/drm/i915/i915_sw_fence.c static int __i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence, fence 309 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 313 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_set_error_once(fence, signaler->error); fence 320 drivers/gpu/drm/i915/i915_sw_fence.c if (unlikely(i915_sw_fence_check_if_after(fence, signaler))) fence 331 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_set_error_once(fence, signaler->error); fence 341 drivers/gpu/drm/i915/i915_sw_fence.c wq->private = fence; fence 343 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_await(fence); fence 358 drivers/gpu/drm/i915/i915_sw_fence.c int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence, fence 362 drivers/gpu/drm/i915/i915_sw_fence.c return __i915_sw_fence_await_sw_fence(fence, signaler, wq, 0); fence 365 drivers/gpu/drm/i915/i915_sw_fence.c int i915_sw_fence_await_sw_fence_gfp(struct i915_sw_fence *fence, fence 369 drivers/gpu/drm/i915/i915_sw_fence.c return __i915_sw_fence_await_sw_fence(fence, signaler, NULL, gfp); fence 385 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_set_error_once(cb->fence, dma->error); fence 386 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_complete(cb->fence); fence 393 drivers/gpu/drm/i915/i915_sw_fence.c struct i915_sw_fence *fence; fence 395 drivers/gpu/drm/i915/i915_sw_fence.c fence = xchg(&cb->base.fence, NULL); fence 396 drivers/gpu/drm/i915/i915_sw_fence.c if (!fence) fence 403 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_debug_hint(fence)); fence 405 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_set_error_once(fence, -ETIMEDOUT); fence 406 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_complete(fence); fence 414 drivers/gpu/drm/i915/i915_sw_fence.c struct i915_sw_fence *fence; fence 416 drivers/gpu/drm/i915/i915_sw_fence.c fence = xchg(&cb->base.fence, NULL); fence 417 drivers/gpu/drm/i915/i915_sw_fence.c if (fence) fence 418 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_complete(fence); fence 434 drivers/gpu/drm/i915/i915_sw_fence.c int i915_sw_fence_await_dma_fence(struct i915_sw_fence *fence, fence 443 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 460 drivers/gpu/drm/i915/i915_sw_fence.c cb->fence = fence; fence 461 drivers/gpu/drm/i915/i915_sw_fence.c 
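
The dma_i915_sw_fence_wake_timer hits above (fence 393-418) race a timeout timer against the dma-fence callback; xchg(&cb->base.fence, NULL) lets whichever path fires first claim the fence exactly once, so -ETIMEDOUT and normal completion can never both run. A compilable sketch of that claim idiom, with stand-in types:

#include <stdatomic.h>
#include <stdio.h>

struct sw_fence { int error; };            /* stand-in */

struct wait_cb { _Atomic(struct sw_fence *) fence; };

/* Both the signal callback and the timeout path call this; the atomic
 * swap guarantees exactly one caller gets a non-NULL fence back. */
static struct sw_fence *claim_fence(struct wait_cb *cb)
{
	return atomic_exchange(&cb->fence, NULL);
}

int main(void)
{
	struct sw_fence f = { 0 };
	struct wait_cb cb;

	atomic_init(&cb.fence, &f);
	printf("%d %d\n", claim_fence(&cb) != NULL,
	       claim_fence(&cb) != NULL);   /* 1 0 */
	return 0;
}
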
i915_sw_fence_await(fence); fence 495 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_set_error_once(cb->fence, dma->error); fence 496 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_complete(cb->fence); fence 499 drivers/gpu/drm/i915/i915_sw_fence.c int __i915_sw_fence_await_dma_fence(struct i915_sw_fence *fence, fence 505 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 510 drivers/gpu/drm/i915/i915_sw_fence.c cb->fence = fence; fence 511 drivers/gpu/drm/i915/i915_sw_fence.c i915_sw_fence_await(fence); fence 525 drivers/gpu/drm/i915/i915_sw_fence.c int i915_sw_fence_await_reservation(struct i915_sw_fence *fence, fence 535 drivers/gpu/drm/i915/i915_sw_fence.c debug_fence_assert(fence); fence 551 drivers/gpu/drm/i915/i915_sw_fence.c pending = i915_sw_fence_await_dma_fence(fence, fence 571 drivers/gpu/drm/i915/i915_sw_fence.c pending = i915_sw_fence_await_dma_fence(fence, fence 41 drivers/gpu/drm/i915/i915_sw_fence.h void __i915_sw_fence_init(struct i915_sw_fence *fence, fence 46 drivers/gpu/drm/i915/i915_sw_fence.h #define i915_sw_fence_init(fence, fn) \ fence 50 drivers/gpu/drm/i915/i915_sw_fence.h __i915_sw_fence_init((fence), (fn), #fence, &__key); \ fence 53 drivers/gpu/drm/i915/i915_sw_fence.h #define i915_sw_fence_init(fence, fn) \ fence 54 drivers/gpu/drm/i915/i915_sw_fence.h __i915_sw_fence_init((fence), (fn), NULL, NULL) fence 58 drivers/gpu/drm/i915/i915_sw_fence.h void i915_sw_fence_fini(struct i915_sw_fence *fence); fence 60 drivers/gpu/drm/i915/i915_sw_fence.h static inline void i915_sw_fence_fini(struct i915_sw_fence *fence) {} fence 63 drivers/gpu/drm/i915/i915_sw_fence.h void i915_sw_fence_commit(struct i915_sw_fence *fence); fence 65 drivers/gpu/drm/i915/i915_sw_fence.h int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence, fence 68 drivers/gpu/drm/i915/i915_sw_fence.h int i915_sw_fence_await_sw_fence_gfp(struct i915_sw_fence *fence, fence 74 drivers/gpu/drm/i915/i915_sw_fence.h struct i915_sw_fence *fence; fence 77 drivers/gpu/drm/i915/i915_sw_fence.h int __i915_sw_fence_await_dma_fence(struct i915_sw_fence *fence, fence 80 drivers/gpu/drm/i915/i915_sw_fence.h int i915_sw_fence_await_dma_fence(struct i915_sw_fence *fence, fence 85 drivers/gpu/drm/i915/i915_sw_fence.h int i915_sw_fence_await_reservation(struct i915_sw_fence *fence, fence 92 drivers/gpu/drm/i915/i915_sw_fence.h void i915_sw_fence_await(struct i915_sw_fence *fence); fence 93 drivers/gpu/drm/i915/i915_sw_fence.h void i915_sw_fence_complete(struct i915_sw_fence *fence); fence 95 drivers/gpu/drm/i915/i915_sw_fence.h static inline bool i915_sw_fence_signaled(const struct i915_sw_fence *fence) fence 97 drivers/gpu/drm/i915/i915_sw_fence.h return atomic_read(&fence->pending) <= 0; fence 100 drivers/gpu/drm/i915/i915_sw_fence.h static inline bool i915_sw_fence_done(const struct i915_sw_fence *fence) fence 102 drivers/gpu/drm/i915/i915_sw_fence.h return atomic_read(&fence->pending) < 0; fence 105 drivers/gpu/drm/i915/i915_sw_fence.h static inline void i915_sw_fence_wait(struct i915_sw_fence *fence) fence 107 drivers/gpu/drm/i915/i915_sw_fence.h wait_event(fence->wait, i915_sw_fence_done(fence)); fence 111 drivers/gpu/drm/i915/i915_sw_fence.h i915_sw_fence_set_error_once(struct i915_sw_fence *fence, int error) fence 113 drivers/gpu/drm/i915/i915_sw_fence.h cmpxchg(&fence->error, 0, error); fence 22 drivers/gpu/drm/i915/i915_sw_fence_work.c fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) fence 24 drivers/gpu/drm/i915/i915_sw_fence_work.c struct 
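
i915_sw_fence_set_error_once() (fence 111-113 above) records only the first error by cmpxchg'ing the field from 0, so a later failure cannot overwrite the original cause. The same idiom in portable C11:

#include <stdatomic.h>
#include <stdio.h>

/* The compare-exchange succeeds only while *error is still 0, so every
 * caller after the first is silently ignored. */
static void set_error_once(atomic_int *error, int err)
{
	int zero = 0;

	atomic_compare_exchange_strong(error, &zero, err);
}

int main(void)
{
	atomic_int error = 0;

	set_error_once(&error, -5);     /* -EIO: sticks */
	set_error_once(&error, -110);   /* -ETIMEDOUT: ignored */
	printf("%d\n", atomic_load(&error));   /* -5 */
	return 0;
}
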
dma_fence_work *f = container_of(fence, typeof(*f), chain); fence 28 drivers/gpu/drm/i915/i915_sw_fence_work.c if (fence->error) fence 29 drivers/gpu/drm/i915/i915_sw_fence_work.c dma_fence_set_error(&f->dma, fence->error); fence 47 drivers/gpu/drm/i915/i915_sw_fence_work.c static const char *get_driver_name(struct dma_fence *fence) fence 52 drivers/gpu/drm/i915/i915_sw_fence_work.c static const char *get_timeline_name(struct dma_fence *fence) fence 54 drivers/gpu/drm/i915/i915_sw_fence_work.c struct dma_fence_work *f = container_of(fence, typeof(*f), dma); fence 59 drivers/gpu/drm/i915/i915_sw_fence_work.c static void fence_release(struct dma_fence *fence) fence 61 drivers/gpu/drm/i915/i915_sw_fence_work.c struct dma_fence_work *f = container_of(fence, typeof(*f), dma); fence 681 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; fence 682 drivers/gpu/drm/i915/i915_trace.h __entry->seqno = rq->fence.seqno; fence 710 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; fence 711 drivers/gpu/drm/i915/i915_trace.h __entry->seqno = rq->fence.seqno; fence 755 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; fence 756 drivers/gpu/drm/i915/i915_trace.h __entry->seqno = rq->fence.seqno; fence 786 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; fence 787 drivers/gpu/drm/i915/i915_trace.h __entry->seqno = rq->fence.seqno; fence 851 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; fence 852 drivers/gpu/drm/i915/i915_trace.h __entry->seqno = rq->fence.seqno; fence 801 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(vma->fence); fence 915 drivers/gpu/drm/i915/i915_vma.c dma_resv_add_excl_fence(vma->resv, &rq->fence); fence 923 drivers/gpu/drm/i915/i915_vma.c dma_resv_add_shared_fence(vma->resv, &rq->fence); fence 999 drivers/gpu/drm/i915/i915_vma.c GEM_BUG_ON(vma->fence); fence 57 drivers/gpu/drm/i915/i915_vma.h struct i915_fence_reg *fence; fence 429 drivers/gpu/drm/i915/i915_vma.h GEM_BUG_ON(atomic_read(&vma->fence->pin_count) <= 0); fence 430 drivers/gpu/drm/i915/i915_vma.h atomic_dec(&vma->fence->pin_count); fence 445 drivers/gpu/drm/i915/i915_vma.h if (vma->fence) fence 6890 drivers/gpu/drm/i915/intel_pm.c !dma_fence_is_signaled_locked(&rq->fence)) { fence 453 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct i915_sw_fence fence; fence 464 drivers/gpu/drm/i915/selftests/i915_gem_evict.c onstack_fence_init(&fence); fence 491 drivers/gpu/drm/i915/selftests/i915_gem_evict.c &fence, fence 502 drivers/gpu/drm/i915/selftests/i915_gem_evict.c onstack_fence_fini(&fence); fence 13 drivers/gpu/drm/i915/selftests/i915_mock_selftests.h selftest(fence, i915_sw_fence_mock_selftests) fence 150 drivers/gpu/drm/i915/selftests/i915_request.c if (dma_fence_wait_timeout(&request->fence, false, T) != -ETIME) { fence 158 drivers/gpu/drm/i915/selftests/i915_request.c if (dma_fence_is_signaled(&request->fence)) { fence 163 drivers/gpu/drm/i915/selftests/i915_request.c if (dma_fence_wait_timeout(&request->fence, false, T / 2) != -ETIME) { fence 168 drivers/gpu/drm/i915/selftests/i915_request.c if (dma_fence_wait_timeout(&request->fence, false, T) <= 0) { fence 173 drivers/gpu/drm/i915/selftests/i915_request.c if (!dma_fence_is_signaled(&request->fence)) { fence 178 drivers/gpu/drm/i915/selftests/i915_request.c if (dma_fence_wait_timeout(&request->fence, false, T) <= 0) { fence 364 drivers/gpu/drm/i915/selftests/i915_request.c &rq->fence, fence 385 drivers/gpu/drm/i915/selftests/i915_request.c rq->fence.context, rq->fence.seqno, 
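
The i915_vma.c hits at fence 915/923 above show the standard reservation-object rule: a write installs the request fence as the exclusive fence, a read joins the shared list. A kernel-style sketch of that choice, assuming the dma_resv is already locked and a shared slot was reserved beforehand with dma_resv_reserve_shared():

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Publish a request fence on a buffer's reservation object. */
static void publish_fence(struct dma_resv *resv,
			  struct dma_fence *fence, bool write)
{
	if (write)
		dma_resv_add_excl_fence(resv, fence);    /* writers exclude */
	else
		dma_resv_add_shared_fence(resv, fence);  /* readers share */
}
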
fence 399 drivers/gpu/drm/i915/selftests/i915_request.c &rq->fence.flags)) { fence 401 drivers/gpu/drm/i915/selftests/i915_request.c rq->fence.context, rq->fence.seqno); fence 980 drivers/gpu/drm/i915/selftests/i915_request.c &prev->fence); fence 32 drivers/gpu/drm/i915/selftests/i915_sw_fence.c fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) fence 48 drivers/gpu/drm/i915/selftests/i915_sw_fence.c struct i915_sw_fence *fence; fence 50 drivers/gpu/drm/i915/selftests/i915_sw_fence.c fence = kmalloc(sizeof(*fence), GFP_KERNEL); fence 51 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!fence) fence 54 drivers/gpu/drm/i915/selftests/i915_sw_fence.c i915_sw_fence_init(fence, fence_notify); fence 55 drivers/gpu/drm/i915/selftests/i915_sw_fence.c return fence; fence 58 drivers/gpu/drm/i915/selftests/i915_sw_fence.c static void free_fence(struct i915_sw_fence *fence) fence 60 drivers/gpu/drm/i915/selftests/i915_sw_fence.c i915_sw_fence_fini(fence); fence 61 drivers/gpu/drm/i915/selftests/i915_sw_fence.c kfree(fence); fence 64 drivers/gpu/drm/i915/selftests/i915_sw_fence.c static int __test_self(struct i915_sw_fence *fence) fence 66 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (i915_sw_fence_done(fence)) fence 69 drivers/gpu/drm/i915/selftests/i915_sw_fence.c i915_sw_fence_commit(fence); fence 70 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!i915_sw_fence_done(fence)) fence 73 drivers/gpu/drm/i915/selftests/i915_sw_fence.c i915_sw_fence_wait(fence); fence 74 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!i915_sw_fence_done(fence)) fence 82 drivers/gpu/drm/i915/selftests/i915_sw_fence.c struct i915_sw_fence *fence; fence 86 drivers/gpu/drm/i915/selftests/i915_sw_fence.c fence = alloc_fence(); fence 87 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!fence) fence 90 drivers/gpu/drm/i915/selftests/i915_sw_fence.c ret = __test_self(fence); fence 92 drivers/gpu/drm/i915/selftests/i915_sw_fence.c free_fence(fence); fence 576 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!i915_sw_fence_done(&tf.fence)) { fence 586 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (i915_sw_fence_done(&tf.fence)) { fence 592 drivers/gpu/drm/i915/selftests/i915_sw_fence.c i915_sw_fence_wait(&tf.fence); fence 595 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!i915_sw_fence_done(&tf.fence)) { fence 616 drivers/gpu/drm/i915/selftests/i915_sw_fence.c static const char *mock_name(struct dma_fence *fence) fence 642 drivers/gpu/drm/i915/selftests/i915_sw_fence.c struct i915_sw_fence *fence; fence 645 drivers/gpu/drm/i915/selftests/i915_sw_fence.c fence = alloc_fence(); fence 646 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (!fence) fence 649 drivers/gpu/drm/i915/selftests/i915_sw_fence.c err = i915_sw_fence_await_dma_fence(fence, dma, delay, GFP_NOWAIT); fence 650 drivers/gpu/drm/i915/selftests/i915_sw_fence.c i915_sw_fence_commit(fence); fence 652 drivers/gpu/drm/i915/selftests/i915_sw_fence.c free_fence(fence); fence 656 drivers/gpu/drm/i915/selftests/i915_sw_fence.c return fence; fence 63 drivers/gpu/drm/i915/selftests/igt_spinner.c static unsigned int seqno_offset(u64 fence) fence 65 drivers/gpu/drm/i915/selftests/igt_spinner.c return offset_in_page(sizeof(u32) * fence); fence 71 drivers/gpu/drm/i915/selftests/igt_spinner.c return hws->node.start + seqno_offset(rq->fence.context); fence 138 drivers/gpu/drm/i915/selftests/igt_spinner.c *batch++ = rq->fence.seqno; fence 173 drivers/gpu/drm/i915/selftests/igt_spinner.c u32 *seqno = spin->seqno + 
seqno_offset(rq->fence.context); fence 198 drivers/gpu/drm/i915/selftests/igt_spinner.c rq->fence.seqno), fence 201 drivers/gpu/drm/i915/selftests/igt_spinner.c rq->fence.seqno), fence 30 drivers/gpu/drm/i915/selftests/lib_sw_fence.c nop_fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) fence 35 drivers/gpu/drm/i915/selftests/lib_sw_fence.c void __onstack_fence_init(struct i915_sw_fence *fence, fence 39 drivers/gpu/drm/i915/selftests/lib_sw_fence.c debug_fence_init_onstack(fence); fence 41 drivers/gpu/drm/i915/selftests/lib_sw_fence.c __init_waitqueue_head(&fence->wait, name, key); fence 42 drivers/gpu/drm/i915/selftests/lib_sw_fence.c atomic_set(&fence->pending, 1); fence 43 drivers/gpu/drm/i915/selftests/lib_sw_fence.c fence->error = 0; fence 44 drivers/gpu/drm/i915/selftests/lib_sw_fence.c fence->flags = (unsigned long)nop_fence_notify; fence 47 drivers/gpu/drm/i915/selftests/lib_sw_fence.c void onstack_fence_fini(struct i915_sw_fence *fence) fence 49 drivers/gpu/drm/i915/selftests/lib_sw_fence.c if (!fence->flags) fence 52 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_commit(fence); fence 53 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_fini(fence); fence 60 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_commit(&tf->fence); fence 65 drivers/gpu/drm/i915/selftests/lib_sw_fence.c onstack_fence_init(&tf->fence); fence 72 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_commit(&tf->fence); fence 78 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_commit(&tf->fence); fence 81 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_fini(&tf->fence); fence 85 drivers/gpu/drm/i915/selftests/lib_sw_fence.c struct i915_sw_fence fence; fence 93 drivers/gpu/drm/i915/selftests/lib_sw_fence.c heap_fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) fence 95 drivers/gpu/drm/i915/selftests/lib_sw_fence.c struct heap_fence *h = container_of(fence, typeof(*h), fence); fence 102 drivers/gpu/drm/i915/selftests/lib_sw_fence.c heap_fence_put(&h->fence); fence 116 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_init(&h->fence, heap_fence_notify); fence 119 drivers/gpu/drm/i915/selftests/lib_sw_fence.c return &h->fence; fence 126 drivers/gpu/drm/i915/selftests/lib_sw_fence.c i915_sw_fence_fini(&h->fence); fence 131 drivers/gpu/drm/i915/selftests/lib_sw_fence.c void heap_fence_put(struct i915_sw_fence *fence) fence 133 drivers/gpu/drm/i915/selftests/lib_sw_fence.c struct heap_fence *h = container_of(fence, typeof(*h), fence); fence 16 drivers/gpu/drm/i915/selftests/lib_sw_fence.h #define onstack_fence_init(fence) \ fence 20 drivers/gpu/drm/i915/selftests/lib_sw_fence.h __onstack_fence_init((fence), #fence, &__key); \ fence 23 drivers/gpu/drm/i915/selftests/lib_sw_fence.h #define onstack_fence_init(fence) \ fence 24 drivers/gpu/drm/i915/selftests/lib_sw_fence.h __onstack_fence_init((fence), NULL, NULL) fence 27 drivers/gpu/drm/i915/selftests/lib_sw_fence.h void __onstack_fence_init(struct i915_sw_fence *fence, fence 30 drivers/gpu/drm/i915/selftests/lib_sw_fence.h void onstack_fence_fini(struct i915_sw_fence *fence); fence 33 drivers/gpu/drm/i915/selftests/lib_sw_fence.h struct i915_sw_fence fence; fence 41 drivers/gpu/drm/i915/selftests/lib_sw_fence.h void heap_fence_put(struct i915_sw_fence *fence); fence 211 drivers/gpu/drm/lima/lima_gem.c struct dma_fence *fence = NULL; fence 217 drivers/gpu/drm/lima/lima_gem.c 0, 0, &fence); fence 221 drivers/gpu/drm/lima/lima_gem.c err = 
drm_gem_fence_array_add(&submit->task->deps, fence); fence 223 drivers/gpu/drm/lima/lima_gem.c dma_fence_put(fence); fence 238 drivers/gpu/drm/lima/lima_gem.c struct dma_fence *fence; fence 294 drivers/gpu/drm/lima/lima_gem.c fence = lima_sched_context_queue_task( fence 299 drivers/gpu/drm/lima/lima_gem.c dma_resv_add_excl_fence(bos[i]->gem.resv, fence); fence 301 drivers/gpu/drm/lima/lima_gem.c dma_resv_add_shared_fence(bos[i]->gem.resv, fence); fence 310 drivers/gpu/drm/lima/lima_gem.c drm_syncobj_replace_fence(out_sync, fence); fence 314 drivers/gpu/drm/lima/lima_gem.c dma_fence_put(fence); fence 45 drivers/gpu/drm/lima/lima_sched.c static inline struct lima_fence *to_lima_fence(struct dma_fence *fence) fence 47 drivers/gpu/drm/lima/lima_sched.c return container_of(fence, struct lima_fence, base); fence 50 drivers/gpu/drm/lima/lima_sched.c static const char *lima_fence_get_driver_name(struct dma_fence *fence) fence 55 drivers/gpu/drm/lima/lima_sched.c static const char *lima_fence_get_timeline_name(struct dma_fence *fence) fence 57 drivers/gpu/drm/lima/lima_sched.c struct lima_fence *f = to_lima_fence(fence); fence 65 drivers/gpu/drm/lima/lima_sched.c struct lima_fence *fence = to_lima_fence(f); fence 67 drivers/gpu/drm/lima/lima_sched.c kmem_cache_free(lima_fence_slab, fence); fence 70 drivers/gpu/drm/lima/lima_sched.c static void lima_fence_release(struct dma_fence *fence) fence 72 drivers/gpu/drm/lima/lima_sched.c struct lima_fence *f = to_lima_fence(fence); fence 85 drivers/gpu/drm/lima/lima_sched.c struct lima_fence *fence; fence 87 drivers/gpu/drm/lima/lima_sched.c fence = kmem_cache_zalloc(lima_fence_slab, GFP_KERNEL); fence 88 drivers/gpu/drm/lima/lima_sched.c if (!fence) fence 91 drivers/gpu/drm/lima/lima_sched.c fence->pipe = pipe; fence 92 drivers/gpu/drm/lima/lima_sched.c dma_fence_init(&fence->base, &lima_fence_ops, &pipe->fence_lock, fence 95 drivers/gpu/drm/lima/lima_sched.c return fence; fence 138 drivers/gpu/drm/lima/lima_sched.c struct dma_fence *fence; fence 144 drivers/gpu/drm/lima/lima_sched.c xa_for_each(&task->deps, index, fence) { fence 145 drivers/gpu/drm/lima/lima_sched.c dma_fence_put(fence); fence 176 drivers/gpu/drm/lima/lima_sched.c struct dma_fence *fence = dma_fence_get(&task->base.s_fence->finished); fence 179 drivers/gpu/drm/lima/lima_sched.c return fence; fence 197 drivers/gpu/drm/lima/lima_sched.c struct lima_fence *fence; fence 206 drivers/gpu/drm/lima/lima_sched.c fence = lima_fence_create(pipe); fence 207 drivers/gpu/drm/lima/lima_sched.c if (!fence) fence 209 drivers/gpu/drm/lima/lima_sched.c task->fence = &fence->base; fence 214 drivers/gpu/drm/lima/lima_sched.c ret = dma_fence_get(task->fence); fence 255 drivers/gpu/drm/lima/lima_sched.c return task->fence; fence 305 drivers/gpu/drm/lima/lima_sched.c dma_fence_put(task->fence); fence 357 drivers/gpu/drm/lima/lima_sched.c dma_fence_signal(task->fence); fence 24 drivers/gpu/drm/lima/lima_sched.h struct dma_fence *fence; fence 1041 drivers/gpu/drm/mga/mga_state.c u32 *fence = data; fence 1055 drivers/gpu/drm/mga/mga_state.c *fence = dev_priv->next_fence_to_post; fence 1071 drivers/gpu/drm/mga/mga_state.c u32 *fence = data; fence 1080 drivers/gpu/drm/mga/mga_state.c mga_driver_fence_wait(dev, fence); fence 102 drivers/gpu/drm/msm/adreno/a5xx_gpu.c ring->memptrs->fence = submit->seqno; fence 190 drivers/gpu/drm/msm/adreno/a5xx_gpu.c OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence))); fence 191 drivers/gpu/drm/msm/adreno/a5xx_gpu.c OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence))); fence 143 
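
lima_fence_create() above is the canonical shape of a driver fence constructor: zero-allocate, then dma_fence_init() against the pipe's ops, spinlock, fence context and next seqno (panfrost_fence_create() later in this listing is nearly identical). A sketch with illustrative my_* names, assuming the driver obtained its context from dma_fence_context_alloc():

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_pipe {
	spinlock_t fence_lock;
	u64 fence_context;         /* from dma_fence_context_alloc(1) */
	unsigned int emit_seqno;
};

struct my_fence {
	struct dma_fence base;
	struct my_pipe *pipe;
};

extern const struct dma_fence_ops my_fence_ops;  /* driver-provided */

static struct my_fence *my_fence_create(struct my_pipe *pipe)
{
	struct my_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

	if (!fence)
		return NULL;

	fence->pipe = pipe;
	dma_fence_init(&fence->base, &my_fence_ops, &pipe->fence_lock,
		       pipe->fence_context, ++pipe->emit_seqno);
	return fence;
}
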
drivers/gpu/drm/msm/adreno/a6xx_gpu.c OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence))); fence 144 drivers/gpu/drm/msm/adreno/a6xx_gpu.c OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence))); fence 353 drivers/gpu/drm/msm/adreno/adreno_gpu.c ring->memptrs->fence = ring->seqno; fence 462 drivers/gpu/drm/msm/adreno/adreno_gpu.c OUT_RING(ring, rbmemptr(ring, fence)); fence 468 drivers/gpu/drm/msm/adreno/adreno_gpu.c OUT_RING(ring, rbmemptr(ring, fence)); fence 535 drivers/gpu/drm/msm/adreno/adreno_gpu.c state->ring[i].fence = gpu->rb[i]->memptrs->fence; fence 717 drivers/gpu/drm/msm/adreno/adreno_gpu.c drm_printf(p, " retired-fence: %d\n", state->ring[i].fence); fence 771 drivers/gpu/drm/msm/adreno/adreno_gpu.c ring->memptrs->fence, fence 891 drivers/gpu/drm/msm/msm_drv.c ret = msm_wait_fence(gpu->rb[queue->prio]->fctx, args->fence, &timeout, fence 303 drivers/gpu/drm/msm/msm_drv.h struct msm_gpu *gpu, bool exclusive, struct dma_fence *fence); fence 36 drivers/gpu/drm/msm/msm_fence.c static inline bool fence_completed(struct msm_fence_context *fctx, uint32_t fence) fence 38 drivers/gpu/drm/msm/msm_fence.c return (int32_t)(fctx->completed_fence - fence) >= 0; fence 42 drivers/gpu/drm/msm/msm_fence.c int msm_wait_fence(struct msm_fence_context *fctx, uint32_t fence, fence 47 drivers/gpu/drm/msm/msm_fence.c if (fence > fctx->last_fence) { fence 49 drivers/gpu/drm/msm/msm_fence.c fctx->name, fence, fctx->last_fence); fence 55 drivers/gpu/drm/msm/msm_fence.c ret = fence_completed(fctx, fence) ? 0 : -EBUSY; fence 61 drivers/gpu/drm/msm/msm_fence.c fence_completed(fctx, fence), fence 65 drivers/gpu/drm/msm/msm_fence.c fence_completed(fctx, fence), fence 70 drivers/gpu/drm/msm/msm_fence.c fence, fctx->completed_fence); fence 81 drivers/gpu/drm/msm/msm_fence.c void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence) fence 84 drivers/gpu/drm/msm/msm_fence.c fctx->completed_fence = max(fence, fctx->completed_fence); fence 95 drivers/gpu/drm/msm/msm_fence.c static inline struct msm_fence *to_msm_fence(struct dma_fence *fence) fence 97 drivers/gpu/drm/msm/msm_fence.c return container_of(fence, struct msm_fence, base); fence 100 drivers/gpu/drm/msm/msm_fence.c static const char *msm_fence_get_driver_name(struct dma_fence *fence) fence 105 drivers/gpu/drm/msm/msm_fence.c static const char *msm_fence_get_timeline_name(struct dma_fence *fence) fence 107 drivers/gpu/drm/msm/msm_fence.c struct msm_fence *f = to_msm_fence(fence); fence 111 drivers/gpu/drm/msm/msm_fence.c static bool msm_fence_signaled(struct dma_fence *fence) fence 113 drivers/gpu/drm/msm/msm_fence.c struct msm_fence *f = to_msm_fence(fence); fence 27 drivers/gpu/drm/msm/msm_fence.h int msm_wait_fence(struct msm_fence_context *fctx, uint32_t fence, fence 29 drivers/gpu/drm/msm/msm_fence.h void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence); fence 706 drivers/gpu/drm/msm/msm_gem.c struct dma_fence *fence; fence 711 drivers/gpu/drm/msm/msm_gem.c fence = dma_resv_get_excl(obj->resv); fence 713 drivers/gpu/drm/msm/msm_gem.c if (fence && (fence->context != fctx->context)) { fence 714 drivers/gpu/drm/msm/msm_gem.c ret = dma_fence_wait(fence, true); fence 724 drivers/gpu/drm/msm/msm_gem.c fence = rcu_dereference_protected(fobj->shared[i], fence 726 drivers/gpu/drm/msm/msm_gem.c if (fence->context != fctx->context) { fence 727 drivers/gpu/drm/msm/msm_gem.c ret = dma_fence_wait(fence, true); fence 737 drivers/gpu/drm/msm/msm_gem.c struct msm_gpu *gpu, bool exclusive, struct dma_fence *fence) fence 743 
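
fence_completed() in msm_fence.c:36-38 above compares 32-bit seqnos through a signed subtraction, which stays correct across counter wraparound as long as the two values are within 2^31 of each other. A compilable demonstration:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool fence_completed(uint32_t completed, uint32_t fence)
{
	return (int32_t)(completed - fence) >= 0;
}

int main(void)
{
	/* Near the wrap point: 0x00000002 is "after" 0xfffffffe. */
	printf("%d\n", fence_completed(0x00000002u, 0xfffffffeu)); /* 1 */
	printf("%d\n", fence_completed(0xfffffffeu, 0x00000002u)); /* 0 */
	return 0;
}
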
drivers/gpu/drm/msm/msm_gem.c dma_resv_add_excl_fence(obj->resv, fence); fence 745 drivers/gpu/drm/msm/msm_gem.c dma_resv_add_shared_fence(obj->resv, fence); fence 789 drivers/gpu/drm/msm/msm_gem.c static void describe_fence(struct dma_fence *fence, const char *type, fence 792 drivers/gpu/drm/msm/msm_gem.c if (!dma_fence_is_signaled(fence)) fence 794 drivers/gpu/drm/msm/msm_gem.c fence->ops->get_driver_name(fence), fence 795 drivers/gpu/drm/msm/msm_gem.c fence->ops->get_timeline_name(fence), fence 796 drivers/gpu/drm/msm/msm_gem.c fence->seqno); fence 804 drivers/gpu/drm/msm/msm_gem.c struct dma_fence *fence; fence 845 drivers/gpu/drm/msm/msm_gem.c fobj = rcu_dereference(robj->fence); fence 850 drivers/gpu/drm/msm/msm_gem.c fence = rcu_dereference(fobj->shared[i]); fence 851 drivers/gpu/drm/msm/msm_gem.c describe_fence(fence, "Shared", m); fence 855 drivers/gpu/drm/msm/msm_gem.c fence = rcu_dereference(robj->fence_excl); fence 856 drivers/gpu/drm/msm/msm_gem.c if (fence) fence 857 drivers/gpu/drm/msm/msm_gem.c describe_fence(fence, "Exclusive", m); fence 138 drivers/gpu/drm/msm/msm_gem.h struct dma_fence *fence; fence 46 drivers/gpu/drm/msm/msm_gem_submit.c submit->fence = NULL; fence 64 drivers/gpu/drm/msm/msm_gem_submit.c dma_fence_put(submit->fence); fence 565 drivers/gpu/drm/msm/msm_gem_submit.c submit->fence = msm_fence_alloc(ring->fctx); fence 566 drivers/gpu/drm/msm/msm_gem_submit.c if (IS_ERR(submit->fence)) { fence 567 drivers/gpu/drm/msm/msm_gem_submit.c ret = PTR_ERR(submit->fence); fence 568 drivers/gpu/drm/msm/msm_gem_submit.c submit->fence = NULL; fence 573 drivers/gpu/drm/msm/msm_gem_submit.c sync_file = sync_file_create(submit->fence); fence 582 drivers/gpu/drm/msm/msm_gem_submit.c args->fence = submit->fence->seqno; fence 390 drivers/gpu/drm/msm/msm_gpu.c uint32_t fence) fence 395 drivers/gpu/drm/msm/msm_gpu.c if (submit->seqno > fence) fence 399 drivers/gpu/drm/msm/msm_gpu.c submit->fence->seqno); fence 404 drivers/gpu/drm/msm/msm_gpu.c find_submit(struct msm_ringbuffer *ring, uint32_t fence) fence 411 drivers/gpu/drm/msm/msm_gpu.c if (submit->seqno == fence) fence 433 drivers/gpu/drm/msm/msm_gpu.c submit = find_submit(cur_ring, cur_ring->memptrs->fence + 1); fence 474 drivers/gpu/drm/msm/msm_gpu.c uint32_t fence = ring->memptrs->fence; fence 481 drivers/gpu/drm/msm/msm_gpu.c fence++; fence 483 drivers/gpu/drm/msm/msm_gpu.c update_fences(gpu, ring, fence); fence 524 drivers/gpu/drm/msm/msm_gpu.c uint32_t fence = ring->memptrs->fence; fence 526 drivers/gpu/drm/msm/msm_gpu.c if (fence != ring->hangcheck_fence) { fence 528 drivers/gpu/drm/msm/msm_gpu.c ring->hangcheck_fence = fence; fence 529 drivers/gpu/drm/msm/msm_gpu.c } else if (fence < ring->seqno) { fence 531 drivers/gpu/drm/msm/msm_gpu.c ring->hangcheck_fence = fence; fence 535 drivers/gpu/drm/msm/msm_gpu.c gpu->name, fence); fence 700 drivers/gpu/drm/msm/msm_gpu.c if (dma_fence_is_signaled(submit->fence)) fence 713 drivers/gpu/drm/msm/msm_gpu.c update_fences(gpu, gpu->rb[i], gpu->rb[i]->memptrs->fence); fence 765 drivers/gpu/drm/msm/msm_gpu.c msm_gem_move_to_active(&msm_obj->base, gpu, true, submit->fence); fence 767 drivers/gpu/drm/msm/msm_gpu.c msm_gem_move_to_active(&msm_obj->base, gpu, false, submit->fence); fence 151 drivers/gpu/drm/msm/msm_gpu.h if (ring->seqno > ring->memptrs->fence) fence 193 drivers/gpu/drm/msm/msm_gpu.h u32 fence; fence 31 drivers/gpu/drm/msm/msm_ringbuffer.h volatile uint32_t fence; fence 1045 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_fence_chan *fctx = chan->fence; fence 1101 
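
The msm_gpu.c hangcheck hits above (fence 524-535) call a ring hung only when the retired seqno stopped moving between timer ticks while submitted work is still outstanding. A sketch of that test; note that where the listed code uses a plain unsigned compare, this version reuses the wraparound-safe signed difference, which is my assumption rather than msm's exact form:

#include <stdbool.h>
#include <stdint.h>

struct ring_state {
	uint32_t retired;    /* memptrs->fence: last seqno the GPU wrote back */
	uint32_t submitted;  /* ring->seqno: last seqno handed to the GPU */
	uint32_t hangcheck;  /* retired value sampled at the previous tick */
};

/* Called from a periodic timer, in the spirit of hangcheck_handler(). */
static bool ring_hung(struct ring_state *r)
{
	if (r->retired != r->hangcheck) {
		r->hangcheck = r->retired;      /* made progress */
		return false;
	}
	if ((int32_t)(r->retired - r->submitted) >= 0)
		return false;                   /* idle, nothing pending */
	return true;                            /* stuck with work queued */
}
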
drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_fence_chan *fctx = chan->fence; fence 1151 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_fence *fence; fence 1217 drivers/gpu/drm/nouveau/dispnv04/crtc.c ret = nv04_page_flip_emit(chan, old_bo, new_bo, s, &fence); fence 1225 drivers/gpu/drm/nouveau/dispnv04/crtc.c nouveau_bo_fence(old_bo, fence, false); fence 1229 drivers/gpu/drm/nouveau/dispnv04/crtc.c nouveau_fence_unref(&fence); fence 506 drivers/gpu/drm/nouveau/dispnv50/wndw.c asyw->state.fence = dma_resv_get_excl_rcu(fb->nvbo->bo.base.resv); fence 60 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_fence_unref(®->fence); fence 80 drivers/gpu/drm/nouveau/nouveau_bo.c (!tile->fence || nouveau_fence_done(tile->fence))) fence 91 drivers/gpu/drm/nouveau/nouveau_bo.c struct dma_fence *fence) fence 97 drivers/gpu/drm/nouveau/nouveau_bo.c tile->fence = (struct nouveau_fence *)dma_fence_get(fence); fence 1130 drivers/gpu/drm/nouveau/nouveau_bo.c struct nouveau_fence *fence; fence 1148 drivers/gpu/drm/nouveau/nouveau_bo.c ret = nouveau_fence_new(chan, false, &fence); fence 1151 drivers/gpu/drm/nouveau/nouveau_bo.c &fence->base, fence 1154 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_fence_unref(&fence); fence 1356 drivers/gpu/drm/nouveau/nouveau_bo.c struct dma_fence *fence = dma_resv_get_excl(bo->base.resv); fence 1358 drivers/gpu/drm/nouveau/nouveau_bo.c nv10_bo_put_tile_region(dev, *old_tile, fence); fence 1685 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_bo_fence(struct nouveau_bo *nvbo, struct nouveau_fence *fence, bool exclusive) fence 1690 drivers/gpu/drm/nouveau/nouveau_bo.c dma_resv_add_excl_fence(resv, &fence->base); fence 1691 drivers/gpu/drm/nouveau/nouveau_bo.c else if (fence) fence 1692 drivers/gpu/drm/nouveau/nouveau_bo.c dma_resv_add_shared_fence(resv, &fence->base); fence 64 drivers/gpu/drm/nouveau/nouveau_chan.c if (likely(chan && chan->fence && !atomic_read(&chan->killed))) { fence 66 drivers/gpu/drm/nouveau/nouveau_chan.c struct nouveau_fence *fence = NULL; fence 69 drivers/gpu/drm/nouveau/nouveau_chan.c ret = nouveau_fence_new(chan, false, &fence); fence 71 drivers/gpu/drm/nouveau/nouveau_chan.c ret = nouveau_fence_wait(fence, false, false); fence 72 drivers/gpu/drm/nouveau/nouveau_chan.c nouveau_fence_unref(&fence); fence 97 drivers/gpu/drm/nouveau/nouveau_chan.c if (chan->fence) fence 30 drivers/gpu/drm/nouveau/nouveau_chan.h void *fence; fence 118 drivers/gpu/drm/nouveau/nouveau_dmem.c static void nouveau_dmem_fence_done(struct nouveau_fence **fence) fence 120 drivers/gpu/drm/nouveau/nouveau_dmem.c if (fence) { fence 121 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_fence_wait(*fence, true, false); fence 122 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_fence_unref(fence); fence 169 drivers/gpu/drm/nouveau/nouveau_dmem.c struct nouveau_fence *fence; fence 195 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_fence_new(dmem->migrate.chan, false, &fence); fence 197 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_dmem_fence_done(&fence); fence 596 drivers/gpu/drm/nouveau/nouveau_dmem.c struct nouveau_fence *fence; fence 607 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_fence_new(drm->dmem->migrate.chan, false, &fence); fence 609 drivers/gpu/drm/nouveau/nouveau_dmem.c nouveau_dmem_fence_done(&fence); fence 124 drivers/gpu/drm/nouveau/nouveau_drm.c nouveau_cli_work_ready(struct dma_fence *fence) fence 126 drivers/gpu/drm/nouveau/nouveau_drm.c if (!dma_fence_is_signaled(fence)) fence 128 drivers/gpu/drm/nouveau/nouveau_drm.c dma_fence_put(fence); fence 139 
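
nouveau_channel_idle() (nouveau_chan.c:64-72 above) proves a channel is drained by emitting one extra fence and waiting on it: fences retire in submission order, so when the new fence signals, everything queued before it has retired too. A condensed sketch using the same nouveau calls shown in this listing; the header names are assumed:

#include "nouveau_chan.h"    /* assumed local headers */
#include "nouveau_fence.h"

static int my_channel_drain(struct nouveau_channel *chan)
{
	struct nouveau_fence *fence = NULL;
	int ret;

	ret = nouveau_fence_new(chan, false, &fence);
	if (ret)
		return ret;

	/* Blocks until the freshly emitted fence retires. */
	ret = nouveau_fence_wait(fence, false, false);
	nouveau_fence_unref(&fence);
	return ret;
}
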
drivers/gpu/drm/nouveau/nouveau_drm.c if (!work->fence || nouveau_cli_work_ready(work->fence)) { fence 148 drivers/gpu/drm/nouveau/nouveau_drm.c nouveau_cli_work_fence(struct dma_fence *fence, struct dma_fence_cb *cb) fence 155 drivers/gpu/drm/nouveau/nouveau_drm.c nouveau_cli_work_queue(struct nouveau_cli *cli, struct dma_fence *fence, fence 158 drivers/gpu/drm/nouveau/nouveau_drm.c work->fence = dma_fence_get(fence); fence 162 drivers/gpu/drm/nouveau/nouveau_drm.c if (dma_fence_add_callback(fence, &work->cb, nouveau_cli_work_fence)) fence 163 drivers/gpu/drm/nouveau/nouveau_drm.c nouveau_cli_work_fence(fence, &work->cb); fence 418 drivers/gpu/drm/nouveau/nouveau_drm.c if (drm->fence) fence 842 drivers/gpu/drm/nouveau/nouveau_drm.c if (drm->fence && nouveau_fence(drm)->suspend) { fence 857 drivers/gpu/drm/nouveau/nouveau_drm.c if (drm->fence && nouveau_fence(drm)->resume) fence 882 drivers/gpu/drm/nouveau/nouveau_drm.c if (drm->fence && nouveau_fence(drm)->resume) fence 71 drivers/gpu/drm/nouveau/nouveau_drv.h struct nouveau_fence *fence; fence 119 drivers/gpu/drm/nouveau/nouveau_drv.h struct dma_fence *fence; fence 172 drivers/gpu/drm/nouveau/nouveau_drv.h void *fence; fence 44 drivers/gpu/drm/nouveau/nouveau_fence.c from_fence(struct dma_fence *fence) fence 46 drivers/gpu/drm/nouveau/nouveau_fence.c return container_of(fence, struct nouveau_fence, base); fence 50 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fctx(struct nouveau_fence *fence) fence 52 drivers/gpu/drm/nouveau/nouveau_fence.c return container_of(fence->base.lock, struct nouveau_fence_chan, lock); fence 56 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_signal(struct nouveau_fence *fence) fence 60 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_signal_locked(&fence->base); fence 61 drivers/gpu/drm/nouveau/nouveau_fence.c list_del(&fence->head); fence 62 drivers/gpu/drm/nouveau/nouveau_fence.c rcu_assign_pointer(fence->channel, NULL); fence 64 drivers/gpu/drm/nouveau/nouveau_fence.c if (test_bit(DMA_FENCE_FLAG_USER_BITS, &fence->base.flags)) { fence 65 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence); fence 71 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_put(&fence->base); fence 76 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_local_fence(struct dma_fence *fence, struct nouveau_drm *drm) fence 78 drivers/gpu/drm/nouveau/nouveau_fence.c if (fence->ops != &nouveau_fence_ops_legacy && fence 79 drivers/gpu/drm/nouveau/nouveau_fence.c fence->ops != &nouveau_fence_ops_uevent) fence 82 drivers/gpu/drm/nouveau/nouveau_fence.c if (fence->context < drm->chan.context_base || fence 83 drivers/gpu/drm/nouveau/nouveau_fence.c fence->context >= drm->chan.context_base + drm->chan.nr) fence 86 drivers/gpu/drm/nouveau/nouveau_fence.c return from_fence(fence); fence 92 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence; fence 96 drivers/gpu/drm/nouveau/nouveau_fence.c fence = list_entry(fctx->pending.next, typeof(*fence), head); fence 98 drivers/gpu/drm/nouveau/nouveau_fence.c if (nouveau_fence_signal(fence)) fence 128 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence; fence 133 drivers/gpu/drm/nouveau/nouveau_fence.c fence = list_entry(fctx->pending.next, typeof(*fence), head); fence 135 drivers/gpu/drm/nouveau/nouveau_fence.c if ((int)(seq - fence->base.seqno) < 0) fence 138 drivers/gpu/drm/nouveau/nouveau_fence.c drop |= nouveau_fence_signal(fence); fence 154 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence; fence 157 
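
nouveau_local_fence() above recognizes the driver's own fences by their ops table plus a context-range check against the contiguous block handed out by dma_fence_context_alloc(). A generic sketch of that ownership test, with illustrative my_* names:

#include <linux/dma-fence.h>

extern const struct dma_fence_ops my_fence_ops;  /* driver-provided */

struct my_drm {
	u64 context_base;   /* from dma_fence_context_alloc(nr) */
	int nr;
};

/* A fence is "ours" only if it uses our ops and its context lies in
 * the block we allocated at init time; foreign fences get the generic
 * dma_fence_wait() path instead. */
static bool my_local_fence(struct dma_fence *fence, struct my_drm *drm)
{
	if (fence->ops != &my_fence_ops)
		return false;

	return fence->context >= drm->context_base &&
	       fence->context < drm->context_base + drm->nr;
}
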
drivers/gpu/drm/nouveau/nouveau_fence.c fence = list_entry(fctx->pending.next, typeof(*fence), head); fence 158 drivers/gpu/drm/nouveau/nouveau_fence.c chan = rcu_dereference_protected(fence->channel, lockdep_is_held(&fctx->lock)); fence 170 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_priv *priv = (void*)chan->drm->fence; fence 201 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_emit(struct nouveau_fence *fence, struct nouveau_channel *chan) fence 203 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = chan->fence; fence 204 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_priv *priv = (void*)chan->drm->fence; fence 207 drivers/gpu/drm/nouveau/nouveau_fence.c fence->channel = chan; fence 208 drivers/gpu/drm/nouveau/nouveau_fence.c fence->timeout = jiffies + (15 * HZ); fence 211 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_init(&fence->base, &nouveau_fence_ops_uevent, fence 214 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_init(&fence->base, &nouveau_fence_ops_legacy, fence 218 drivers/gpu/drm/nouveau/nouveau_fence.c trace_dma_fence_emit(&fence->base); fence 219 drivers/gpu/drm/nouveau/nouveau_fence.c ret = fctx->emit(fence); fence 221 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_get(&fence->base); fence 227 drivers/gpu/drm/nouveau/nouveau_fence.c list_add_tail(&fence->head, &fctx->pending); fence 235 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_done(struct nouveau_fence *fence) fence 237 drivers/gpu/drm/nouveau/nouveau_fence.c if (fence->base.ops == &nouveau_fence_ops_legacy || fence 238 drivers/gpu/drm/nouveau/nouveau_fence.c fence->base.ops == &nouveau_fence_ops_uevent) { fence 239 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence); fence 243 drivers/gpu/drm/nouveau/nouveau_fence.c if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->base.flags)) fence 247 drivers/gpu/drm/nouveau/nouveau_fence.c chan = rcu_dereference_protected(fence->channel, lockdep_is_held(&fctx->lock)); fence 252 drivers/gpu/drm/nouveau/nouveau_fence.c return dma_fence_is_signaled(&fence->base); fence 258 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence = from_fence(f); fence 262 drivers/gpu/drm/nouveau/nouveau_fence.c while (!nouveau_fence_done(fence)) { fence 291 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_wait_busy(struct nouveau_fence *fence, bool intr) fence 295 drivers/gpu/drm/nouveau/nouveau_fence.c while (!nouveau_fence_done(fence)) { fence 296 drivers/gpu/drm/nouveau/nouveau_fence.c if (time_after_eq(jiffies, fence->timeout)) { fence 316 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_wait(struct nouveau_fence *fence, bool lazy, bool intr) fence 321 drivers/gpu/drm/nouveau/nouveau_fence.c return nouveau_fence_wait_busy(fence, intr); fence 323 drivers/gpu/drm/nouveau/nouveau_fence.c ret = dma_fence_wait_timeout(&fence->base, intr, 15 * HZ); fence 335 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = chan->fence; fence 336 drivers/gpu/drm/nouveau/nouveau_fence.c struct dma_fence *fence; fence 350 drivers/gpu/drm/nouveau/nouveau_fence.c fence = dma_resv_get_excl(resv); fence 352 drivers/gpu/drm/nouveau/nouveau_fence.c if (fence && (!exclusive || !fobj || !fobj->shared_count)) { fence 356 drivers/gpu/drm/nouveau/nouveau_fence.c f = nouveau_local_fence(fence, chan->drm); fence 366 drivers/gpu/drm/nouveau/nouveau_fence.c ret = dma_fence_wait(fence, intr); fence 378 drivers/gpu/drm/nouveau/nouveau_fence.c fence = 
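
nouveau_fence_wait() above bounds its sleep with dma_fence_wait_timeout(), which returns a negative errno on error, 0 on timeout, and the remaining jiffies on success; the driver then folds that back into a plain 0/-EBUSY result. A sketch of that mapping:

#include <linux/dma-fence.h>
#include <linux/jiffies.h>

static int my_fence_wait_bounded(struct dma_fence *f, bool intr)
{
	long ret = dma_fence_wait_timeout(f, intr, 15 * HZ);

	if (ret < 0)
		return ret;      /* e.g. -ERESTARTSYS when interrupted */
	if (ret == 0)
		return -EBUSY;   /* the 15 second budget expired */
	return 0;                /* signalled, with jiffies to spare */
}
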
rcu_dereference_protected(fobj->shared[i], fence 381 drivers/gpu/drm/nouveau/nouveau_fence.c f = nouveau_local_fence(fence, chan->drm); fence 391 drivers/gpu/drm/nouveau/nouveau_fence.c ret = dma_fence_wait(fence, intr); fence 409 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence; fence 412 drivers/gpu/drm/nouveau/nouveau_fence.c if (unlikely(!chan->fence)) fence 415 drivers/gpu/drm/nouveau/nouveau_fence.c fence = kzalloc(sizeof(*fence), GFP_KERNEL); fence 416 drivers/gpu/drm/nouveau/nouveau_fence.c if (!fence) fence 419 drivers/gpu/drm/nouveau/nouveau_fence.c ret = nouveau_fence_emit(fence, chan); fence 421 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_unref(&fence); fence 423 drivers/gpu/drm/nouveau/nouveau_fence.c *pfence = fence; fence 427 drivers/gpu/drm/nouveau/nouveau_fence.c static const char *nouveau_fence_get_get_driver_name(struct dma_fence *fence) fence 434 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence = from_fence(f); fence 435 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence); fence 448 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence = from_fence(f); fence 449 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence); fence 454 drivers/gpu/drm/nouveau/nouveau_fence.c chan = rcu_dereference(fence->channel); fence 456 drivers/gpu/drm/nouveau/nouveau_fence.c ret = (int)(fctx->read(chan) - fence->base.seqno) >= 0; fence 464 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence = from_fence(f); fence 470 drivers/gpu/drm/nouveau/nouveau_fence.c WARN_ON(kref_read(&fence->base.refcount) <= 1); fence 478 drivers/gpu/drm/nouveau/nouveau_fence.c list_del(&fence->head); fence 480 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_put(&fence->base); fence 489 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence = from_fence(f); fence 490 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence); fence 493 drivers/gpu/drm/nouveau/nouveau_fence.c dma_fence_free(&fence->base); fence 507 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence *fence = from_fence(f); fence 508 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence); fence 516 drivers/gpu/drm/nouveau/nouveau_fence.c set_bit(DMA_FENCE_FLAG_USER_BITS, &fence->base.flags); fence 61 drivers/gpu/drm/nouveau/nouveau_fence.h #define nouveau_fence(drm) ((struct nouveau_fence_priv *)(drm)->fence) fence 98 drivers/gpu/drm/nouveau/nouveau_gem.c nouveau_fence_unref(&vma->fence); fence 114 drivers/gpu/drm/nouveau/nouveau_gem.c struct dma_fence *fence = vma->fence ? 
&vma->fence->base : NULL; fence 119 drivers/gpu/drm/nouveau/nouveau_gem.c if (!fence) { fence 125 drivers/gpu/drm/nouveau/nouveau_gem.c WARN_ON(dma_fence_wait_timeout(fence, false, 2 * HZ) <= 0); fence 132 drivers/gpu/drm/nouveau/nouveau_gem.c nouveau_cli_work_queue(vma->vmm->cli, fence, &work->work); fence 331 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_fence *fence, fence 341 drivers/gpu/drm/nouveau/nouveau_gem.c if (likely(fence)) { fence 342 drivers/gpu/drm/nouveau/nouveau_gem.c nouveau_bo_fence(nvbo, fence, !!b->write_domains); fence 347 drivers/gpu/drm/nouveau/nouveau_gem.c nouveau_fence_unref(&vma->fence); fence 348 drivers/gpu/drm/nouveau/nouveau_gem.c dma_fence_get(&fence->base); fence 349 drivers/gpu/drm/nouveau/nouveau_gem.c vma->fence = fence; fence 367 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_fence *fence, fence 370 drivers/gpu/drm/nouveau/nouveau_gem.c validate_fini_no_ticket(op, chan, fence, pbbo); fence 699 drivers/gpu/drm/nouveau/nouveau_gem.c struct nouveau_fence *fence = NULL; fence 843 drivers/gpu/drm/nouveau/nouveau_gem.c ret = nouveau_fence_new(chan, false, &fence); fence 851 drivers/gpu/drm/nouveau/nouveau_gem.c validate_fini(&op, chan, fence, bo); fence 852 drivers/gpu/drm/nouveau/nouveau_gem.c nouveau_fence_unref(&fence); fence 96 drivers/gpu/drm/nouveau/nouveau_vmm.c vma->fence = NULL; fence 15 drivers/gpu/drm/nouveau/nouveau_vmm.h struct nouveau_fence *fence; fence 40 drivers/gpu/drm/nouveau/nv04_fence.c nv04_fence_emit(struct nouveau_fence *fence) fence 42 drivers/gpu/drm/nouveau/nv04_fence.c struct nouveau_channel *chan = fence->channel; fence 46 drivers/gpu/drm/nouveau/nv04_fence.c OUT_RING (chan, fence->base.seqno); fence 53 drivers/gpu/drm/nouveau/nv04_fence.c nv04_fence_sync(struct nouveau_fence *fence, fence 71 drivers/gpu/drm/nouveau/nv04_fence.c struct nv04_fence_chan *fctx = chan->fence; fence 73 drivers/gpu/drm/nouveau/nv04_fence.c chan->fence = NULL; fence 86 drivers/gpu/drm/nouveau/nv04_fence.c chan->fence = fctx; fence 95 drivers/gpu/drm/nouveau/nv04_fence.c struct nv04_fence_priv *priv = drm->fence; fence 96 drivers/gpu/drm/nouveau/nv04_fence.c drm->fence = NULL; fence 105 drivers/gpu/drm/nouveau/nv04_fence.c priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); fence 30 drivers/gpu/drm/nouveau/nv10_fence.c nv10_fence_emit(struct nouveau_fence *fence) fence 32 drivers/gpu/drm/nouveau/nv10_fence.c struct nouveau_channel *chan = fence->channel; fence 36 drivers/gpu/drm/nouveau/nv10_fence.c OUT_RING (chan, fence->base.seqno); fence 44 drivers/gpu/drm/nouveau/nv10_fence.c nv10_fence_sync(struct nouveau_fence *fence, fence 59 drivers/gpu/drm/nouveau/nv10_fence.c struct nv10_fence_chan *fctx = chan->fence; fence 62 drivers/gpu/drm/nouveau/nv10_fence.c chan->fence = NULL; fence 71 drivers/gpu/drm/nouveau/nv10_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); fence 85 drivers/gpu/drm/nouveau/nv10_fence.c struct nv10_fence_priv *priv = drm->fence; fence 90 drivers/gpu/drm/nouveau/nv10_fence.c drm->fence = NULL; fence 99 drivers/gpu/drm/nouveau/nv10_fence.c priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); fence 34 drivers/gpu/drm/nouveau/nv17_fence.c nv17_fence_sync(struct nouveau_fence *fence, fence 38 drivers/gpu/drm/nouveau/nv17_fence.c struct nv10_fence_priv *priv = chan->drm->fence; fence 39 drivers/gpu/drm/nouveau/nv17_fence.c struct nv10_fence_chan *fctx = chan->fence; fence 77 drivers/gpu/drm/nouveau/nv17_fence.c struct nv10_fence_priv *priv = chan->drm->fence; fence 84 drivers/gpu/drm/nouveau/nv17_fence.c 
fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); fence 109 drivers/gpu/drm/nouveau/nv17_fence.c struct nv10_fence_priv *priv = drm->fence; fence 120 drivers/gpu/drm/nouveau/nv17_fence.c priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); fence 38 drivers/gpu/drm/nouveau/nv50_fence.c struct nv10_fence_priv *priv = chan->drm->fence; fence 45 drivers/gpu/drm/nouveau/nv50_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); fence 73 drivers/gpu/drm/nouveau/nv50_fence.c priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); fence 68 drivers/gpu/drm/nouveau/nv84_fence.c nv84_fence_emit(struct nouveau_fence *fence) fence 70 drivers/gpu/drm/nouveau/nv84_fence.c struct nouveau_channel *chan = fence->channel; fence 71 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx = chan->fence; fence 74 drivers/gpu/drm/nouveau/nv84_fence.c return fctx->base.emit32(chan, addr, fence->base.seqno); fence 78 drivers/gpu/drm/nouveau/nv84_fence.c nv84_fence_sync(struct nouveau_fence *fence, fence 81 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx = chan->fence; fence 84 drivers/gpu/drm/nouveau/nv84_fence.c return fctx->base.sync32(chan, addr, fence->base.seqno); fence 90 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_priv *priv = chan->drm->fence; fence 97 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_priv *priv = chan->drm->fence; fence 98 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx = chan->fence; fence 105 drivers/gpu/drm/nouveau/nv84_fence.c chan->fence = NULL; fence 112 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_priv *priv = chan->drm->fence; fence 116 drivers/gpu/drm/nouveau/nv84_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); fence 140 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_priv *priv = drm->fence; fence 155 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_priv *priv = drm->fence; fence 169 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_priv *priv = drm->fence; fence 174 drivers/gpu/drm/nouveau/nv84_fence.c drm->fence = NULL; fence 185 drivers/gpu/drm/nouveau/nv84_fence.c priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); fence 68 drivers/gpu/drm/nouveau/nvc0_fence.c struct nv84_fence_chan *fctx = chan->fence; fence 80 drivers/gpu/drm/nouveau/nvc0_fence.c struct nv84_fence_priv *priv = drm->fence; fence 53 drivers/gpu/drm/panfrost/panfrost_job.c to_panfrost_fence(struct dma_fence *fence) fence 55 drivers/gpu/drm/panfrost/panfrost_job.c return (struct panfrost_fence *)fence; fence 58 drivers/gpu/drm/panfrost/panfrost_job.c static const char *panfrost_fence_get_driver_name(struct dma_fence *fence) fence 63 drivers/gpu/drm/panfrost/panfrost_job.c static const char *panfrost_fence_get_timeline_name(struct dma_fence *fence) fence 65 drivers/gpu/drm/panfrost/panfrost_job.c struct panfrost_fence *f = to_panfrost_fence(fence); fence 86 drivers/gpu/drm/panfrost/panfrost_job.c struct panfrost_fence *fence; fence 89 drivers/gpu/drm/panfrost/panfrost_job.c fence = kzalloc(sizeof(*fence), GFP_KERNEL); fence 90 drivers/gpu/drm/panfrost/panfrost_job.c if (!fence) fence 93 drivers/gpu/drm/panfrost/panfrost_job.c fence->dev = pfdev->ddev; fence 94 drivers/gpu/drm/panfrost/panfrost_job.c fence->queue = js_num; fence 95 drivers/gpu/drm/panfrost/panfrost_job.c fence->seqno = ++js->queue[js_num].emit_seqno; fence 96 drivers/gpu/drm/panfrost/panfrost_job.c dma_fence_init(&fence->base, &panfrost_fence_ops, &js->job_lock, fence 97 drivers/gpu/drm/panfrost/panfrost_job.c 
js->queue[js_num].fence_context, fence->seqno); fence 99 drivers/gpu/drm/panfrost/panfrost_job.c return &fence->base; fence 202 drivers/gpu/drm/panfrost/panfrost_job.c struct dma_fence *fence) fence 207 drivers/gpu/drm/panfrost/panfrost_job.c dma_resv_add_excl_fence(bos[i]->resv, fence); fence 315 drivers/gpu/drm/panfrost/panfrost_job.c struct dma_fence *fence; fence 321 drivers/gpu/drm/panfrost/panfrost_job.c fence = job->in_fences[i]; fence 323 drivers/gpu/drm/panfrost/panfrost_job.c return fence; fence 330 drivers/gpu/drm/panfrost/panfrost_job.c fence = job->implicit_fences[i]; fence 332 drivers/gpu/drm/panfrost/panfrost_job.c return fence; fence 344 drivers/gpu/drm/panfrost/panfrost_job.c struct dma_fence *fence = NULL; fence 351 drivers/gpu/drm/panfrost/panfrost_job.c fence = panfrost_fence_create(pfdev, slot); fence 352 drivers/gpu/drm/panfrost/panfrost_job.c if (IS_ERR(fence)) fence 357 drivers/gpu/drm/panfrost/panfrost_job.c job->done_fence = dma_fence_get(fence); fence 361 drivers/gpu/drm/panfrost/panfrost_job.c return fence; fence 64 drivers/gpu/drm/qxl/qxl_debugfs.c fobj = rcu_dereference(bo->tbo.base.resv->fence); fence 47 drivers/gpu/drm/qxl/qxl_release.c static const char *qxl_get_driver_name(struct dma_fence *fence) fence 52 drivers/gpu/drm/qxl/qxl_release.c static const char *qxl_get_timeline_name(struct dma_fence *fence) fence 57 drivers/gpu/drm/qxl/qxl_release.c static long qxl_fence_wait(struct dma_fence *fence, bool intr, fence 66 drivers/gpu/drm/qxl/qxl_release.c qdev = container_of(fence->lock, struct qxl_device, release_lock); fence 67 drivers/gpu/drm/qxl/qxl_release.c release = container_of(fence, struct qxl_release, base); fence 73 drivers/gpu/drm/qxl/qxl_release.c if (dma_fence_is_signaled(fence)) fence 82 drivers/gpu/drm/qxl/qxl_release.c if (dma_fence_is_signaled(fence)) fence 86 drivers/gpu/drm/qxl/qxl_release.c if (dma_fence_is_signaled(fence)) fence 98 drivers/gpu/drm/qxl/qxl_release.c DMA_FENCE_WARN(fence, "failed to wait on release %llu " fence 100 drivers/gpu/drm/qxl/qxl_release.c fence->context & ~0xf0000000, sc); fence 3555 drivers/gpu/drm/radeon/cik.c struct radeon_fence *fence) fence 3557 drivers/gpu/drm/radeon/cik.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 3558 drivers/gpu/drm/radeon/cik.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 3571 drivers/gpu/drm/radeon/cik.c radeon_ring_write(ring, fence->seq - 1); fence 3582 drivers/gpu/drm/radeon/cik.c radeon_ring_write(ring, fence->seq); fence 3596 drivers/gpu/drm/radeon/cik.c struct radeon_fence *fence) fence 3598 drivers/gpu/drm/radeon/cik.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 3599 drivers/gpu/drm/radeon/cik.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 3610 drivers/gpu/drm/radeon/cik.c radeon_ring_write(ring, fence->seq); fence 3664 drivers/gpu/drm/radeon/cik.c struct radeon_fence *fence; fence 3705 drivers/gpu/drm/radeon/cik.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 3713 drivers/gpu/drm/radeon/cik.c radeon_sync_free(rdev, &sync, fence); fence 3715 drivers/gpu/drm/radeon/cik.c return fence; fence 3814 drivers/gpu/drm/radeon/cik.c r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( fence 3835 drivers/gpu/drm/radeon/cik.c DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); fence 201 drivers/gpu/drm/radeon/cik_sdma.c struct radeon_fence *fence) fence 203 drivers/gpu/drm/radeon/cik_sdma.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 204 drivers/gpu/drm/radeon/cik_sdma.c u64 addr = 
rdev->fence_drv[fence->ring].gpu_addr; fence 210 drivers/gpu/drm/radeon/cik_sdma.c radeon_ring_write(ring, fence->seq); fence 214 drivers/gpu/drm/radeon/cik_sdma.c cik_sdma_hdp_flush_ring_emit(rdev, fence->ring); fence 584 drivers/gpu/drm/radeon/cik_sdma.c struct radeon_fence *fence; fence 622 drivers/gpu/drm/radeon/cik_sdma.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 630 drivers/gpu/drm/radeon/cik_sdma.c radeon_sync_free(rdev, &sync, fence); fence 632 drivers/gpu/drm/radeon/cik_sdma.c return fence; fence 740 drivers/gpu/drm/radeon/cik_sdma.c r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( fence 757 drivers/gpu/drm/radeon/cik_sdma.c DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); fence 42 drivers/gpu/drm/radeon/evergreen_dma.c struct radeon_fence *fence) fence 44 drivers/gpu/drm/radeon/evergreen_dma.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 45 drivers/gpu/drm/radeon/evergreen_dma.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 50 drivers/gpu/drm/radeon/evergreen_dma.c radeon_ring_write(ring, fence->seq); fence 113 drivers/gpu/drm/radeon/evergreen_dma.c struct radeon_fence *fence; fence 149 drivers/gpu/drm/radeon/evergreen_dma.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 157 drivers/gpu/drm/radeon/evergreen_dma.c radeon_sync_free(rdev, &sync, fence); fence 159 drivers/gpu/drm/radeon/evergreen_dma.c return fence; fence 1403 drivers/gpu/drm/radeon/ni.c struct radeon_fence *fence) fence 1405 drivers/gpu/drm/radeon/ni.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 1406 drivers/gpu/drm/radeon/ni.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 1421 drivers/gpu/drm/radeon/ni.c radeon_ring_write(ring, fence->seq); fence 859 drivers/gpu/drm/radeon/r100.c struct radeon_fence *fence) fence 861 drivers/gpu/drm/radeon/r100.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 874 drivers/gpu/drm/radeon/r100.c radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); fence 875 drivers/gpu/drm/radeon/r100.c radeon_ring_write(ring, fence->seq); fence 897 drivers/gpu/drm/radeon/r100.c struct radeon_fence *fence; fence 958 drivers/gpu/drm/radeon/r100.c r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); fence 964 drivers/gpu/drm/radeon/r100.c return fence; fence 3739 drivers/gpu/drm/radeon/r100.c r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( fence 90 drivers/gpu/drm/radeon/r200.c struct radeon_fence *fence; fence 122 drivers/gpu/drm/radeon/r200.c r = radeon_fence_emit(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX); fence 128 drivers/gpu/drm/radeon/r200.c return fence; fence 215 drivers/gpu/drm/radeon/r300.c struct radeon_fence *fence) fence 217 drivers/gpu/drm/radeon/r300.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 242 drivers/gpu/drm/radeon/r300.c radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0)); fence 243 drivers/gpu/drm/radeon/r300.c radeon_ring_write(ring, fence->seq); fence 2867 drivers/gpu/drm/radeon/r600.c struct radeon_fence *fence) fence 2869 drivers/gpu/drm/radeon/r600.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 2877 drivers/gpu/drm/radeon/r600.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 2889 drivers/gpu/drm/radeon/r600.c radeon_ring_write(ring, fence->seq); fence 2906 drivers/gpu/drm/radeon/r600.c radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2)); fence 2907 drivers/gpu/drm/radeon/r600.c 
radeon_ring_write(ring, fence->seq); fence 2968 drivers/gpu/drm/radeon/r600.c struct radeon_fence *fence; fence 3014 drivers/gpu/drm/radeon/r600.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 3022 drivers/gpu/drm/radeon/r600.c radeon_sync_free(rdev, &sync, fence); fence 3024 drivers/gpu/drm/radeon/r600.c return fence; fence 3427 drivers/gpu/drm/radeon/r600.c r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( fence 3445 drivers/gpu/drm/radeon/r600.c DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); fence 288 drivers/gpu/drm/radeon/r600_dma.c struct radeon_fence *fence) fence 290 drivers/gpu/drm/radeon/r600_dma.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 291 drivers/gpu/drm/radeon/r600_dma.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 297 drivers/gpu/drm/radeon/r600_dma.c radeon_ring_write(ring, lower_32_bits(fence->seq)); fence 371 drivers/gpu/drm/radeon/r600_dma.c r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies( fence 388 drivers/gpu/drm/radeon/r600_dma.c DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i); fence 449 drivers/gpu/drm/radeon/r600_dma.c struct radeon_fence *fence; fence 485 drivers/gpu/drm/radeon/r600_dma.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 493 drivers/gpu/drm/radeon/r600_dma.c radeon_sync_free(rdev, &sync, fence); fence 495 drivers/gpu/drm/radeon/r600_dma.c return fence; fence 387 drivers/gpu/drm/radeon/radeon.h int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring); fence 389 drivers/gpu/drm/radeon/radeon.h bool radeon_fence_signaled(struct radeon_fence *fence); fence 390 drivers/gpu/drm/radeon/radeon.h long radeon_fence_wait_timeout(struct radeon_fence *fence, bool interruptible, long timeout); fence 391 drivers/gpu/drm/radeon/radeon.h int radeon_fence_wait(struct radeon_fence *fence, bool interruptible); fence 397 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence); fence 398 drivers/gpu/drm/radeon/radeon.h void radeon_fence_unref(struct radeon_fence **fence); fence 400 drivers/gpu/drm/radeon/radeon.h bool radeon_fence_need_sync(struct radeon_fence *fence, int ring); fence 401 drivers/gpu/drm/radeon/radeon.h void radeon_fence_note_sync(struct radeon_fence *fence, int ring); fence 564 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence; fence 606 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence); fence 619 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence); fence 628 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence); fence 744 drivers/gpu/drm/radeon/radeon.h struct dma_fence *fence; fence 827 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence; fence 1685 drivers/gpu/drm/radeon/radeon.h uint32_t handle, struct radeon_fence **fence); fence 1687 drivers/gpu/drm/radeon/radeon.h uint32_t handle, struct radeon_fence **fence); fence 1728 drivers/gpu/drm/radeon/radeon.h uint32_t handle, struct radeon_fence **fence); fence 1730 drivers/gpu/drm/radeon/radeon.h uint32_t handle, struct radeon_fence **fence); fence 1741 drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence); fence 1822 drivers/gpu/drm/radeon/radeon.h void (*emit_fence)(struct radeon_device *rdev, struct radeon_fence *fence); fence 2732 drivers/gpu/drm/radeon/radeon.h #define radeon_fence_ring_emit(rdev, r, fence) (rdev)->asic->ring[(r)]->emit_fence((rdev), (fence)) fence 2841 drivers/gpu/drm/radeon/radeon.h int ring, struct radeon_fence *fence); fence 2844 
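
The r600.c and r600_dma.c hits above repeat one pattern across every radeon engine: an emit_fence hook that pushes the per-ring fence address and the new sequence number into the command ring. A minimal sketch of that shape, reassembled from the entries above; the engine-specific packet header is omitted because the real encodings (EVENT_WRITE_EOP, DMA_PACKET_FENCE, ...) differ per ASIC:

        /* Sketch only: the real hooks wrap these writes in
         * ASIC-specific fence packets. */
        static void fence_ring_emit_sketch(struct radeon_device *rdev,
                                           struct radeon_fence *fence)
        {
                struct radeon_ring *ring = &rdev->ring[fence->ring];
                u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

                /* engine-specific packet header would go here */
                radeon_ring_write(ring, lower_32_bits(addr));
                radeon_ring_write(ring, upper_32_bits(addr));
                radeon_ring_write(ring, lower_32_bits(fence->seq));
        }
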
drivers/gpu/drm/radeon/radeon.h struct radeon_fence *fence); fence 77 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 173 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 323 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 329 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 544 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 604 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 708 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 790 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 808 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 810 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 943 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 954 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_fence *fence); fence 42 drivers/gpu/drm/radeon/radeon_benchmark.c struct radeon_fence *fence = NULL; fence 49 drivers/gpu/drm/radeon/radeon_benchmark.c fence = radeon_copy_dma(rdev, saddr, daddr, fence 54 drivers/gpu/drm/radeon/radeon_benchmark.c fence = radeon_copy_blit(rdev, saddr, daddr, fence 62 drivers/gpu/drm/radeon/radeon_benchmark.c if (IS_ERR(fence)) fence 63 drivers/gpu/drm/radeon/radeon_benchmark.c return PTR_ERR(fence); fence 65 drivers/gpu/drm/radeon/radeon_benchmark.c r = radeon_fence_wait(fence, false); fence 66 drivers/gpu/drm/radeon/radeon_benchmark.c radeon_fence_unref(&fence); fence 434 drivers/gpu/drm/radeon/radeon_cs.c &parser->ib.fence->base); fence 422 drivers/gpu/drm/radeon/radeon_display.c if (work->fence) { fence 423 drivers/gpu/drm/radeon/radeon_display.c struct radeon_fence *fence; fence 425 drivers/gpu/drm/radeon/radeon_display.c fence = to_radeon_fence(work->fence); fence 426 drivers/gpu/drm/radeon/radeon_display.c if (fence && fence->rdev == rdev) { fence 427 drivers/gpu/drm/radeon/radeon_display.c r = radeon_fence_wait(fence, false); fence 436 drivers/gpu/drm/radeon/radeon_display.c r = dma_fence_wait(work->fence, false); fence 446 drivers/gpu/drm/radeon/radeon_display.c dma_fence_put(work->fence); fence 447 drivers/gpu/drm/radeon/radeon_display.c work->fence = NULL; fence 538 drivers/gpu/drm/radeon/radeon_display.c work->fence = dma_fence_get(dma_resv_get_excl(new_rbo->tbo.base.resv)); fence 613 drivers/gpu/drm/radeon/radeon_display.c dma_fence_put(work->fence); fence 136 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence **fence, fence 142 drivers/gpu/drm/radeon/radeon_fence.c *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); fence 143 drivers/gpu/drm/radeon/radeon_fence.c if ((*fence) == NULL) { fence 146 drivers/gpu/drm/radeon/radeon_fence.c (*fence)->rdev = rdev; fence 147 drivers/gpu/drm/radeon/radeon_fence.c (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; fence 148 drivers/gpu/drm/radeon/radeon_fence.c (*fence)->ring = ring; fence 149 drivers/gpu/drm/radeon/radeon_fence.c (*fence)->is_vm_update = false; fence 150 drivers/gpu/drm/radeon/radeon_fence.c dma_fence_init(&(*fence)->base, &radeon_fence_ops, fence 154 drivers/gpu/drm/radeon/radeon_fence.c radeon_fence_ring_emit(rdev, ring, *fence); fence 155 drivers/gpu/drm/radeon/radeon_fence.c trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); fence 169 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *fence; fence 172 drivers/gpu/drm/radeon/radeon_fence.c fence = container_of(wait, struct radeon_fence, fence_wake); fence 
178 drivers/gpu/drm/radeon/radeon_fence.c seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); fence 179 drivers/gpu/drm/radeon/radeon_fence.c if (seq >= fence->seq) { fence 180 drivers/gpu/drm/radeon/radeon_fence.c int ret = dma_fence_signal_locked(&fence->base); fence 183 drivers/gpu/drm/radeon/radeon_fence.c DMA_FENCE_TRACE(&fence->base, "signaled from irq context\n"); fence 185 drivers/gpu/drm/radeon/radeon_fence.c DMA_FENCE_TRACE(&fence->base, "was already signaled\n"); fence 187 drivers/gpu/drm/radeon/radeon_fence.c radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); fence 188 drivers/gpu/drm/radeon/radeon_fence.c __remove_wait_queue(&fence->rdev->fence_queue, &fence->fence_wake); fence 189 drivers/gpu/drm/radeon/radeon_fence.c dma_fence_put(&fence->base); fence 191 drivers/gpu/drm/radeon/radeon_fence.c DMA_FENCE_TRACE(&fence->base, "pending\n"); fence 364 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *fence = to_radeon_fence(f); fence 365 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_device *rdev = fence->rdev; fence 366 drivers/gpu/drm/radeon/radeon_fence.c unsigned ring = fence->ring; fence 367 drivers/gpu/drm/radeon/radeon_fence.c u64 seq = fence->seq; fence 394 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *fence = to_radeon_fence(f); fence 395 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_device *rdev = fence->rdev; fence 397 drivers/gpu/drm/radeon/radeon_fence.c if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) fence 401 drivers/gpu/drm/radeon/radeon_fence.c radeon_irq_kms_sw_irq_get(rdev, fence->ring); fence 403 drivers/gpu/drm/radeon/radeon_fence.c if (radeon_fence_activity(rdev, fence->ring)) fence 407 drivers/gpu/drm/radeon/radeon_fence.c if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { fence 408 drivers/gpu/drm/radeon/radeon_fence.c radeon_irq_kms_sw_irq_put(rdev, fence->ring); fence 416 drivers/gpu/drm/radeon/radeon_fence.c if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring)) fence 417 drivers/gpu/drm/radeon/radeon_fence.c rdev->fence_drv[fence->ring].delayed_irq = true; fence 418 drivers/gpu/drm/radeon/radeon_fence.c radeon_fence_schedule_check(rdev, fence->ring); fence 421 drivers/gpu/drm/radeon/radeon_fence.c fence->fence_wake.flags = 0; fence 422 drivers/gpu/drm/radeon/radeon_fence.c fence->fence_wake.private = NULL; fence 423 drivers/gpu/drm/radeon/radeon_fence.c fence->fence_wake.func = radeon_fence_check_signaled; fence 424 drivers/gpu/drm/radeon/radeon_fence.c __add_wait_queue(&rdev->fence_queue, &fence->fence_wake); fence 427 drivers/gpu/drm/radeon/radeon_fence.c DMA_FENCE_TRACE(&fence->base, "armed on ring %i!\n", fence->ring); fence 439 drivers/gpu/drm/radeon/radeon_fence.c bool radeon_fence_signaled(struct radeon_fence *fence) fence 441 drivers/gpu/drm/radeon/radeon_fence.c if (!fence) fence 444 drivers/gpu/drm/radeon/radeon_fence.c if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { fence 447 drivers/gpu/drm/radeon/radeon_fence.c ret = dma_fence_signal(&fence->base); fence 449 drivers/gpu/drm/radeon/radeon_fence.c DMA_FENCE_TRACE(&fence->base, "signaled from radeon_fence_signaled\n"); fence 550 drivers/gpu/drm/radeon/radeon_fence.c long radeon_fence_wait_timeout(struct radeon_fence *fence, bool intr, long timeout) fence 562 drivers/gpu/drm/radeon/radeon_fence.c if (WARN_ON_ONCE(!to_radeon_fence(&fence->base))) fence 563 drivers/gpu/drm/radeon/radeon_fence.c return dma_fence_wait(&fence->base, intr); fence 565 
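
The radeon_fence.c entries around here (fence 136 through fence 155) spell out the whole emit path. A condensed sketch reassembled from those entries; the dma_fence_init() lock and context arguments are cut off in the index and filled in on a best-effort basis, so treat them as assumptions:

        /* Allocate a fence, claim the next per-ring sequence number,
         * wrap it in a struct dma_fence, then let the ASIC hook write
         * the seqno into the ring (error handling and tracing trimmed). */
        static int radeon_fence_emit_sketch(struct radeon_device *rdev,
                                            struct radeon_fence **fence,
                                            int ring)
        {
                u64 seq;

                *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL);
                if (*fence == NULL)
                        return -ENOMEM;

                (*fence)->rdev = rdev;
                (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];
                (*fence)->ring = ring;
                (*fence)->is_vm_update = false;
                dma_fence_init(&(*fence)->base, &radeon_fence_ops,
                               &rdev->fence_queue.lock,    /* assumed */
                               rdev->fence_context + ring, /* assumed */
                               seq);
                radeon_fence_ring_emit(rdev, ring, *fence);
                return 0;
        }
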
drivers/gpu/drm/radeon/radeon_fence.c seq[fence->ring] = fence->seq; fence 566 drivers/gpu/drm/radeon/radeon_fence.c r = radeon_fence_wait_seq_timeout(fence->rdev, seq, intr, timeout); fence 571 drivers/gpu/drm/radeon/radeon_fence.c r_sig = dma_fence_signal(&fence->base); fence 573 drivers/gpu/drm/radeon/radeon_fence.c DMA_FENCE_TRACE(&fence->base, "signaled from fence_wait\n"); fence 588 drivers/gpu/drm/radeon/radeon_fence.c int radeon_fence_wait(struct radeon_fence *fence, bool intr) fence 590 drivers/gpu/drm/radeon/radeon_fence.c long r = radeon_fence_wait_timeout(fence, intr, MAX_SCHEDULE_TIMEOUT); fence 706 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence) fence 708 drivers/gpu/drm/radeon/radeon_fence.c dma_fence_get(&fence->base); fence 709 drivers/gpu/drm/radeon/radeon_fence.c return fence; fence 719 drivers/gpu/drm/radeon/radeon_fence.c void radeon_fence_unref(struct radeon_fence **fence) fence 721 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *tmp = *fence; fence 723 drivers/gpu/drm/radeon/radeon_fence.c *fence = NULL; fence 767 drivers/gpu/drm/radeon/radeon_fence.c bool radeon_fence_need_sync(struct radeon_fence *fence, int dst_ring) fence 771 drivers/gpu/drm/radeon/radeon_fence.c if (!fence) { fence 775 drivers/gpu/drm/radeon/radeon_fence.c if (fence->ring == dst_ring) { fence 780 drivers/gpu/drm/radeon/radeon_fence.c fdrv = &fence->rdev->fence_drv[dst_ring]; fence 781 drivers/gpu/drm/radeon/radeon_fence.c if (fence->seq <= fdrv->sync_seq[fence->ring]) { fence 797 drivers/gpu/drm/radeon/radeon_fence.c void radeon_fence_note_sync(struct radeon_fence *fence, int dst_ring) fence 802 drivers/gpu/drm/radeon/radeon_fence.c if (!fence) { fence 806 drivers/gpu/drm/radeon/radeon_fence.c if (fence->ring == dst_ring) { fence 811 drivers/gpu/drm/radeon/radeon_fence.c src = &fence->rdev->fence_drv[fence->ring]; fence 812 drivers/gpu/drm/radeon/radeon_fence.c dst = &fence->rdev->fence_drv[dst_ring]; fence 1039 drivers/gpu/drm/radeon/radeon_fence.c static const char *radeon_fence_get_driver_name(struct dma_fence *fence) fence 1046 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *fence = to_radeon_fence(f); fence 1047 drivers/gpu/drm/radeon/radeon_fence.c switch (fence->ring) { fence 1060 drivers/gpu/drm/radeon/radeon_fence.c static inline bool radeon_test_signaled(struct radeon_fence *fence) fence 1062 drivers/gpu/drm/radeon/radeon_fence.c return test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->base.flags); fence 1071 drivers/gpu/drm/radeon/radeon_fence.c radeon_fence_wait_cb(struct dma_fence *fence, struct dma_fence_cb *cb) fence 1082 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence *fence = to_radeon_fence(f); fence 1083 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_device *rdev = fence->rdev; fence 1101 drivers/gpu/drm/radeon/radeon_fence.c if (radeon_test_signaled(fence)) fence 73 drivers/gpu/drm/radeon/radeon_ib.c ib->fence = NULL; fence 99 drivers/gpu/drm/radeon/radeon_ib.c radeon_sync_free(rdev, &ib->sync, ib->fence); fence 100 drivers/gpu/drm/radeon/radeon_ib.c radeon_sa_bo_free(rdev, &ib->sa_bo, ib->fence); fence 101 drivers/gpu/drm/radeon/radeon_ib.c radeon_fence_unref(&ib->fence); fence 168 drivers/gpu/drm/radeon/radeon_ib.c r = radeon_fence_emit(rdev, &ib->fence, ib->ring); fence 175 drivers/gpu/drm/radeon/radeon_ib.c const_ib->fence = radeon_fence_ref(ib->fence); fence 179 drivers/gpu/drm/radeon/radeon_ib.c radeon_vm_fence(rdev, ib->vm, ib->fence); fence 870 
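
The radeon_object.c entries that follow show the reservation-object attach step that publishes an emitted fence to other users of the buffer. A minimal sketch, assuming the BO's reservation lock is held, as in radeon_bo_fence():

        static void radeon_bo_fence_sketch(struct radeon_bo *bo,
                                           struct radeon_fence *fence,
                                           bool shared)
        {
                struct dma_resv *resv = bo->tbo.base.resv;

                if (shared)
                        dma_resv_add_shared_fence(resv, &fence->base);
                else
                        dma_resv_add_excl_fence(resv, &fence->base);
        }

The shared/exclusive choice is the synchronization contract: an exclusive fence orders everyone after it, while shared fences only force later exclusive users to wait.
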
drivers/gpu/drm/radeon/radeon_object.c void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, fence 876 drivers/gpu/drm/radeon/radeon_object.c dma_resv_add_shared_fence(resv, &fence->base); fence 878 drivers/gpu/drm/radeon/radeon_object.c dma_resv_add_excl_fence(resv, &fence->base); fence 157 drivers/gpu/drm/radeon/radeon_object.h extern void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, fence 190 drivers/gpu/drm/radeon/radeon_object.h struct radeon_fence *fence); fence 150 drivers/gpu/drm/radeon/radeon_sa.c radeon_fence_unref(&sa_bo->fence); fence 163 drivers/gpu/drm/radeon/radeon_sa.c if (sa_bo->fence == NULL || !radeon_fence_signaled(sa_bo->fence)) { fence 277 drivers/gpu/drm/radeon/radeon_sa.c if (!radeon_fence_signaled(sa_bo->fence)) { fence 278 drivers/gpu/drm/radeon/radeon_sa.c fences[i] = sa_bo->fence; fence 301 drivers/gpu/drm/radeon/radeon_sa.c ++tries[best_bo->fence->ring]; fence 329 drivers/gpu/drm/radeon/radeon_sa.c (*sa_bo)->fence = NULL; fence 377 drivers/gpu/drm/radeon/radeon_sa.c struct radeon_fence *fence) fence 387 drivers/gpu/drm/radeon/radeon_sa.c if (fence && !radeon_fence_signaled(fence)) { fence 388 drivers/gpu/drm/radeon/radeon_sa.c (*sa_bo)->fence = radeon_fence_ref(fence); fence 390 drivers/gpu/drm/radeon/radeon_sa.c &sa_manager->flist[fence->ring]); fence 416 drivers/gpu/drm/radeon/radeon_sa.c if (i->fence) { fence 418 drivers/gpu/drm/radeon/radeon_sa.c i->fence->seq, i->fence->ring); fence 94 drivers/gpu/drm/radeon/radeon_semaphore.c struct radeon_fence *fence) fence 103 drivers/gpu/drm/radeon/radeon_semaphore.c radeon_sa_bo_free(rdev, &(*semaphore)->sa_bo, fence); fence 63 drivers/gpu/drm/radeon/radeon_sync.c struct radeon_fence *fence) fence 67 drivers/gpu/drm/radeon/radeon_sync.c if (!fence) fence 70 drivers/gpu/drm/radeon/radeon_sync.c other = sync->sync_to[fence->ring]; fence 71 drivers/gpu/drm/radeon/radeon_sync.c sync->sync_to[fence->ring] = radeon_fence_later(fence, other); fence 73 drivers/gpu/drm/radeon/radeon_sync.c if (fence->is_vm_update) { fence 75 drivers/gpu/drm/radeon/radeon_sync.c sync->last_vm_update = radeon_fence_later(fence, other); fence 95 drivers/gpu/drm/radeon/radeon_sync.c struct radeon_fence *fence; fence 101 drivers/gpu/drm/radeon/radeon_sync.c fence = f ? 
to_radeon_fence(f) : NULL; fence 102 drivers/gpu/drm/radeon/radeon_sync.c if (fence && fence->rdev == rdev) fence 103 drivers/gpu/drm/radeon/radeon_sync.c radeon_sync_fence(sync, fence); fence 114 drivers/gpu/drm/radeon/radeon_sync.c fence = to_radeon_fence(f); fence 115 drivers/gpu/drm/radeon/radeon_sync.c if (fence && fence->rdev == rdev) fence 116 drivers/gpu/drm/radeon/radeon_sync.c radeon_sync_fence(sync, fence); fence 144 drivers/gpu/drm/radeon/radeon_sync.c struct radeon_fence *fence = sync->sync_to[i]; fence 148 drivers/gpu/drm/radeon/radeon_sync.c if (!radeon_fence_need_sync(fence, ring)) fence 159 drivers/gpu/drm/radeon/radeon_sync.c r = radeon_fence_wait(fence, false); fence 179 drivers/gpu/drm/radeon/radeon_sync.c r = radeon_fence_wait(fence, false); fence 189 drivers/gpu/drm/radeon/radeon_sync.c r = radeon_fence_wait(fence, false); fence 196 drivers/gpu/drm/radeon/radeon_sync.c radeon_fence_note_sync(fence, ring); fence 213 drivers/gpu/drm/radeon/radeon_sync.c struct radeon_fence *fence) fence 218 drivers/gpu/drm/radeon/radeon_sync.c radeon_semaphore_free(rdev, &sync->semaphores[i], fence); fence 88 drivers/gpu/drm/radeon/radeon_test.c struct radeon_fence *fence = NULL; fence 121 drivers/gpu/drm/radeon/radeon_test.c fence = radeon_copy_dma(rdev, gtt_addr, vram_addr, fence 125 drivers/gpu/drm/radeon/radeon_test.c fence = radeon_copy_blit(rdev, gtt_addr, vram_addr, fence 128 drivers/gpu/drm/radeon/radeon_test.c if (IS_ERR(fence)) { fence 130 drivers/gpu/drm/radeon/radeon_test.c r = PTR_ERR(fence); fence 134 drivers/gpu/drm/radeon/radeon_test.c r = radeon_fence_wait(fence, false); fence 140 drivers/gpu/drm/radeon/radeon_test.c radeon_fence_unref(&fence); fence 172 drivers/gpu/drm/radeon/radeon_test.c fence = radeon_copy_dma(rdev, vram_addr, gtt_addr, fence 176 drivers/gpu/drm/radeon/radeon_test.c fence = radeon_copy_blit(rdev, vram_addr, gtt_addr, fence 179 drivers/gpu/drm/radeon/radeon_test.c if (IS_ERR(fence)) { fence 181 drivers/gpu/drm/radeon/radeon_test.c r = PTR_ERR(fence); fence 185 drivers/gpu/drm/radeon/radeon_test.c r = radeon_fence_wait(fence, false); fence 191 drivers/gpu/drm/radeon/radeon_test.c radeon_fence_unref(&fence); fence 237 drivers/gpu/drm/radeon/radeon_test.c if (fence && !IS_ERR(fence)) fence 238 drivers/gpu/drm/radeon/radeon_test.c radeon_fence_unref(&fence); fence 264 drivers/gpu/drm/radeon/radeon_test.c struct radeon_fence **fence) fence 276 drivers/gpu/drm/radeon/radeon_test.c r = radeon_uvd_get_destroy_msg(rdev, ring->idx, handle, fence); fence 290 drivers/gpu/drm/radeon/radeon_test.c r = radeon_vce_get_destroy_msg(rdev, ring->idx, handle, fence); fence 302 drivers/gpu/drm/radeon/radeon_test.c r = radeon_fence_emit(rdev, fence, ring->idx); fence 208 drivers/gpu/drm/radeon/radeon_ttm.c struct radeon_fence *fence; fence 247 drivers/gpu/drm/radeon/radeon_ttm.c fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->base.resv); fence 248 drivers/gpu/drm/radeon/radeon_ttm.c if (IS_ERR(fence)) fence 249 drivers/gpu/drm/radeon/radeon_ttm.c return PTR_ERR(fence); fence 251 drivers/gpu/drm/radeon/radeon_ttm.c r = ttm_bo_move_accel_cleanup(bo, &fence->base, evict, new_mem); fence 252 drivers/gpu/drm/radeon/radeon_ttm.c radeon_fence_unref(&fence); fence 259 drivers/gpu/drm/radeon/radeon_uvd.c struct radeon_fence *fence; fence 264 drivers/gpu/drm/radeon/radeon_uvd.c R600_RING_TYPE_UVD_INDEX, handle, &fence); fence 270 drivers/gpu/drm/radeon/radeon_uvd.c radeon_fence_wait(fence, false); fence 271 drivers/gpu/drm/radeon/radeon_uvd.c 
radeon_fence_unref(&fence); fence 334 drivers/gpu/drm/radeon/radeon_uvd.c struct radeon_fence *fence; fence 339 drivers/gpu/drm/radeon/radeon_uvd.c R600_RING_TYPE_UVD_INDEX, handle, &fence); fence 345 drivers/gpu/drm/radeon/radeon_uvd.c radeon_fence_wait(fence, false); fence 346 drivers/gpu/drm/radeon/radeon_uvd.c radeon_fence_unref(&fence); fence 742 drivers/gpu/drm/radeon/radeon_uvd.c struct radeon_fence **fence) fence 765 drivers/gpu/drm/radeon/radeon_uvd.c if (fence) fence 766 drivers/gpu/drm/radeon/radeon_uvd.c *fence = radeon_fence_ref(ib.fence); fence 778 drivers/gpu/drm/radeon/radeon_uvd.c uint32_t handle, struct radeon_fence **fence) fence 808 drivers/gpu/drm/radeon/radeon_uvd.c r = radeon_uvd_send_msg(rdev, ring, addr, fence); fence 814 drivers/gpu/drm/radeon/radeon_uvd.c uint32_t handle, struct radeon_fence **fence) fence 837 drivers/gpu/drm/radeon/radeon_uvd.c r = radeon_uvd_send_msg(rdev, ring, addr, fence); fence 347 drivers/gpu/drm/radeon/radeon_vce.c uint32_t handle, struct radeon_fence **fence) fence 395 drivers/gpu/drm/radeon/radeon_vce.c if (fence) fence 396 drivers/gpu/drm/radeon/radeon_vce.c *fence = radeon_fence_ref(ib.fence); fence 414 drivers/gpu/drm/radeon/radeon_vce.c uint32_t handle, struct radeon_fence **fence) fence 452 drivers/gpu/drm/radeon/radeon_vce.c if (fence) fence 453 drivers/gpu/drm/radeon/radeon_vce.c *fence = radeon_fence_ref(ib.fence); fence 736 drivers/gpu/drm/radeon/radeon_vce.c struct radeon_fence *fence) fence 738 drivers/gpu/drm/radeon/radeon_vce.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 739 drivers/gpu/drm/radeon/radeon_vce.c uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; fence 744 drivers/gpu/drm/radeon/radeon_vce.c radeon_ring_write(ring, cpu_to_le32(fence->seq)); fence 798 drivers/gpu/drm/radeon/radeon_vce.c struct radeon_fence *fence = NULL; fence 807 drivers/gpu/drm/radeon/radeon_vce.c r = radeon_vce_get_destroy_msg(rdev, ring->idx, 1, &fence); fence 813 drivers/gpu/drm/radeon/radeon_vce.c r = radeon_fence_wait_timeout(fence, false, usecs_to_jiffies( fence 825 drivers/gpu/drm/radeon/radeon_vce.c radeon_fence_unref(&fence); fence 196 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_fence *fence = rdev->vm_manager.active[i]; fence 198 drivers/gpu/drm/radeon/radeon_vm.c if (fence == NULL) { fence 205 drivers/gpu/drm/radeon/radeon_vm.c if (radeon_fence_is_earlier(fence, best[fence->ring])) { fence 206 drivers/gpu/drm/radeon/radeon_vm.c best[fence->ring] = fence; fence 207 drivers/gpu/drm/radeon/radeon_vm.c choices[fence->ring == ring ? 
0 : 1] = i; fence 270 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_fence *fence) fence 272 drivers/gpu/drm/radeon/radeon_vm.c unsigned vm_id = vm->ids[fence->ring].id; fence 275 drivers/gpu/drm/radeon/radeon_vm.c rdev->vm_manager.active[vm_id] = radeon_fence_ref(fence); fence 277 drivers/gpu/drm/radeon/radeon_vm.c radeon_fence_unref(&vm->ids[fence->ring].last_id_use); fence 278 drivers/gpu/drm/radeon/radeon_vm.c vm->ids[fence->ring].last_id_use = radeon_fence_ref(fence); fence 421 drivers/gpu/drm/radeon/radeon_vm.c ib.fence->is_vm_update = true; fence 422 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_fence(bo, ib.fence, false); fence 712 drivers/gpu/drm/radeon/radeon_vm.c ib.fence->is_vm_update = true; fence 713 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_fence(pd, ib.fence, false); fence 888 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_fence *fence) fence 896 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_fence(vm->page_tables[i].bo, fence, true); fence 1028 drivers/gpu/drm/radeon/radeon_vm.c ib.fence->is_vm_update = true; fence 1029 drivers/gpu/drm/radeon/radeon_vm.c radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence); fence 1031 drivers/gpu/drm/radeon/radeon_vm.c bo_va->last_pt_update = radeon_fence_ref(ib.fence); fence 47 drivers/gpu/drm/radeon/rv770_dma.c struct radeon_fence *fence; fence 83 drivers/gpu/drm/radeon/rv770_dma.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 91 drivers/gpu/drm/radeon/rv770_dma.c radeon_sync_free(rdev, &sync, fence); fence 93 drivers/gpu/drm/radeon/rv770_dma.c return fence; fence 3375 drivers/gpu/drm/radeon/si.c struct radeon_fence *fence) fence 3377 drivers/gpu/drm/radeon/si.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 3378 drivers/gpu/drm/radeon/si.c u64 addr = rdev->fence_drv[fence->ring].gpu_addr; fence 3397 drivers/gpu/drm/radeon/si.c radeon_ring_write(ring, fence->seq); fence 236 drivers/gpu/drm/radeon/si_dma.c struct radeon_fence *fence; fence 272 drivers/gpu/drm/radeon/si_dma.c r = radeon_fence_emit(rdev, &fence, ring->idx); fence 280 drivers/gpu/drm/radeon/si_dma.c radeon_sync_free(rdev, &sync, fence); fence 282 drivers/gpu/drm/radeon/si_dma.c return fence; fence 82 drivers/gpu/drm/radeon/uvd_v1_0.c struct radeon_fence *fence) fence 84 drivers/gpu/drm/radeon/uvd_v1_0.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 85 drivers/gpu/drm/radeon/uvd_v1_0.c uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; fence 90 drivers/gpu/drm/radeon/uvd_v1_0.c radeon_ring_write(ring, fence->seq); fence 502 drivers/gpu/drm/radeon/uvd_v1_0.c struct radeon_fence *fence = NULL; fence 520 drivers/gpu/drm/radeon/uvd_v1_0.c r = radeon_uvd_get_destroy_msg(rdev, ring->idx, 1, &fence); fence 526 drivers/gpu/drm/radeon/uvd_v1_0.c r = radeon_fence_wait_timeout(fence, false, usecs_to_jiffies( fence 539 drivers/gpu/drm/radeon/uvd_v1_0.c radeon_fence_unref(&fence); fence 40 drivers/gpu/drm/radeon/uvd_v2_2.c struct radeon_fence *fence) fence 42 drivers/gpu/drm/radeon/uvd_v2_2.c struct radeon_ring *ring = &rdev->ring[fence->ring]; fence 43 drivers/gpu/drm/radeon/uvd_v2_2.c uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; fence 46 drivers/gpu/drm/radeon/uvd_v2_2.c radeon_ring_write(ring, fence->seq); fence 40 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __field(struct dma_fence *, fence) fence 50 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->fence = &sched_job->s_fence->finished; fence 58 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->fence, __entry->name, fence 63 
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h TP_PROTO(struct drm_sched_fence *fence), fence 64 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h TP_ARGS(fence), fence 66 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __field(struct dma_fence *, fence) fence 70 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->fence = &fence->finished; fence 72 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h TP_printk("fence=%p signaled", __entry->fence) fence 76 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h TP_PROTO(struct drm_sched_job *sched_job, struct dma_fence *fence), fence 77 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h TP_ARGS(sched_job, fence), fence 81 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __field(struct dma_fence *, fence) fence 89 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->fence = fence; fence 90 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->ctx = fence->context; fence 91 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->seqno = fence->seqno; fence 95 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->fence, __entry->ctx, fence 400 drivers/gpu/drm/scheduler/sched_entity.c struct dma_fence *fence = entity->dependency; fence 403 drivers/gpu/drm/scheduler/sched_entity.c if (fence->context == entity->fence_context || fence 404 drivers/gpu/drm/scheduler/sched_entity.c fence->context == entity->fence_context + 1) { fence 414 drivers/gpu/drm/scheduler/sched_entity.c s_fence = to_drm_sched_fence(fence); fence 421 drivers/gpu/drm/scheduler/sched_entity.c fence = dma_fence_get(&s_fence->scheduled); fence 423 drivers/gpu/drm/scheduler/sched_entity.c entity->dependency = fence; fence 424 drivers/gpu/drm/scheduler/sched_entity.c if (!dma_fence_add_callback(fence, &entity->cb, fence 429 drivers/gpu/drm/scheduler/sched_entity.c dma_fence_put(fence); fence 486 drivers/gpu/drm/scheduler/sched_entity.c struct dma_fence *fence; fence 492 drivers/gpu/drm/scheduler/sched_entity.c fence = READ_ONCE(entity->last_scheduled); fence 493 drivers/gpu/drm/scheduler/sched_entity.c if (fence && !dma_fence_is_signaled(fence)) fence 51 drivers/gpu/drm/scheduler/sched_fence.c void drm_sched_fence_scheduled(struct drm_sched_fence *fence) fence 53 drivers/gpu/drm/scheduler/sched_fence.c int ret = dma_fence_signal(&fence->scheduled); fence 56 drivers/gpu/drm/scheduler/sched_fence.c DMA_FENCE_TRACE(&fence->scheduled, fence 59 drivers/gpu/drm/scheduler/sched_fence.c DMA_FENCE_TRACE(&fence->scheduled, fence 63 drivers/gpu/drm/scheduler/sched_fence.c void drm_sched_fence_finished(struct drm_sched_fence *fence) fence 65 drivers/gpu/drm/scheduler/sched_fence.c int ret = dma_fence_signal(&fence->finished); fence 68 drivers/gpu/drm/scheduler/sched_fence.c DMA_FENCE_TRACE(&fence->finished, fence 71 drivers/gpu/drm/scheduler/sched_fence.c DMA_FENCE_TRACE(&fence->finished, fence 75 drivers/gpu/drm/scheduler/sched_fence.c static const char *drm_sched_fence_get_driver_name(struct dma_fence *fence) fence 82 drivers/gpu/drm/scheduler/sched_fence.c struct drm_sched_fence *fence = to_drm_sched_fence(f); fence 83 drivers/gpu/drm/scheduler/sched_fence.c return (const char *)fence->sched->name; fence 96 drivers/gpu/drm/scheduler/sched_fence.c struct drm_sched_fence *fence = to_drm_sched_fence(f); fence 98 drivers/gpu/drm/scheduler/sched_fence.c kmem_cache_free(sched_fence_slab, fence); fence 111 drivers/gpu/drm/scheduler/sched_fence.c struct drm_sched_fence *fence = to_drm_sched_fence(f); fence 113 drivers/gpu/drm/scheduler/sched_fence.c dma_fence_put(fence->parent); fence 114 
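
The sched_fence.c hits above and below describe the scheduler's double fence: a "scheduled" and a "finished" dma_fence on consecutive contexts, sharing one lock. A sketch of the constructor from those entries; the slab and ops objects come from the same file, and the sequence counter is an assumption about the entity's bookkeeping:

        static struct drm_sched_fence *
        drm_sched_fence_create_sketch(struct drm_sched_entity *entity,
                                      void *owner)
        {
                struct drm_sched_fence *fence;
                unsigned seq;

                fence = kmem_cache_zalloc(sched_fence_slab, GFP_KERNEL);
                if (fence == NULL)
                        return NULL;

                fence->owner = owner;
                fence->sched = entity->rq->sched;
                spin_lock_init(&fence->lock);

                seq = atomic_inc_return(&entity->fence_seq); /* assumed */
                dma_fence_init(&fence->scheduled,
                               &drm_sched_fence_ops_scheduled,
                               &fence->lock, entity->fence_context, seq);
                dma_fence_init(&fence->finished,
                               &drm_sched_fence_ops_finished,
                               &fence->lock, entity->fence_context + 1, seq);
                return fence;
        }

Dependency handling keys off the two contexts: drm_sched_entity (fence 400-429 above) swaps a finished-fence dependency for the lighter scheduled fence when both jobs run on the same scheduler.
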
drivers/gpu/drm/scheduler/sched_fence.c call_rcu(&fence->finished.rcu, drm_sched_fence_free); fence 126 drivers/gpu/drm/scheduler/sched_fence.c struct drm_sched_fence *fence = to_drm_sched_fence(f); fence 128 drivers/gpu/drm/scheduler/sched_fence.c dma_fence_put(&fence->scheduled); fence 158 drivers/gpu/drm/scheduler/sched_fence.c struct drm_sched_fence *fence = NULL; fence 161 drivers/gpu/drm/scheduler/sched_fence.c fence = kmem_cache_zalloc(sched_fence_slab, GFP_KERNEL); fence 162 drivers/gpu/drm/scheduler/sched_fence.c if (fence == NULL) fence 165 drivers/gpu/drm/scheduler/sched_fence.c fence->owner = owner; fence 166 drivers/gpu/drm/scheduler/sched_fence.c fence->sched = entity->rq->sched; fence 167 drivers/gpu/drm/scheduler/sched_fence.c spin_lock_init(&fence->lock); fence 170 drivers/gpu/drm/scheduler/sched_fence.c dma_fence_init(&fence->scheduled, &drm_sched_fence_ops_scheduled, fence 171 drivers/gpu/drm/scheduler/sched_fence.c &fence->lock, entity->fence_context, seq); fence 172 drivers/gpu/drm/scheduler/sched_fence.c dma_fence_init(&fence->finished, &drm_sched_fence_ops_finished, fence 173 drivers/gpu/drm/scheduler/sched_fence.c &fence->lock, entity->fence_context + 1, seq); fence 175 drivers/gpu/drm/scheduler/sched_fence.c return fence; fence 168 drivers/gpu/drm/scheduler/sched_main.c bool drm_sched_dependency_optimized(struct dma_fence* fence, fence 174 drivers/gpu/drm/scheduler/sched_main.c if (!fence || dma_fence_is_signaled(fence)) fence 176 drivers/gpu/drm/scheduler/sched_main.c if (fence->context == entity->fence_context) fence 178 drivers/gpu/drm/scheduler/sched_main.c s_fence = to_drm_sched_fence(fence); fence 442 drivers/gpu/drm/scheduler/sched_main.c struct dma_fence *fence = s_job->s_fence->parent; fence 449 drivers/gpu/drm/scheduler/sched_main.c if (fence) { fence 450 drivers/gpu/drm/scheduler/sched_main.c r = dma_fence_add_callback(fence, &s_job->cb, fence 453 drivers/gpu/drm/scheduler/sched_main.c drm_sched_process_job(fence, &s_job->cb); fence 482 drivers/gpu/drm/scheduler/sched_main.c struct dma_fence *fence; fence 496 drivers/gpu/drm/scheduler/sched_main.c fence = sched->ops->run_job(s_job); fence 498 drivers/gpu/drm/scheduler/sched_main.c if (IS_ERR_OR_NULL(fence)) { fence 500 drivers/gpu/drm/scheduler/sched_main.c dma_fence_set_error(&s_fence->finished, PTR_ERR(fence)); fence 502 drivers/gpu/drm/scheduler/sched_main.c s_job->s_fence->parent = fence; fence 710 drivers/gpu/drm/scheduler/sched_main.c struct dma_fence *fence; fence 737 drivers/gpu/drm/scheduler/sched_main.c fence = sched->ops->run_job(sched_job); fence 740 drivers/gpu/drm/scheduler/sched_main.c if (!IS_ERR_OR_NULL(fence)) { fence 741 drivers/gpu/drm/scheduler/sched_main.c s_fence->parent = dma_fence_get(fence); fence 742 drivers/gpu/drm/scheduler/sched_main.c r = dma_fence_add_callback(fence, &sched_job->cb, fence 745 drivers/gpu/drm/scheduler/sched_main.c drm_sched_process_job(fence, &sched_job->cb); fence 749 drivers/gpu/drm/scheduler/sched_main.c dma_fence_put(fence); fence 752 drivers/gpu/drm/scheduler/sched_main.c dma_fence_set_error(&s_fence->finished, PTR_ERR(fence)); fence 491 drivers/gpu/drm/tegra/drm.c args->fence = job->syncpt_end; fence 458 drivers/gpu/drm/ttm/ttm_bo.c struct dma_fence *fence; fence 462 drivers/gpu/drm/ttm/ttm_bo.c fence = dma_resv_get_excl(&bo->base._resv); fence 463 drivers/gpu/drm/ttm/ttm_bo.c if (fence && !fence->ops->signaled) fence 464 drivers/gpu/drm/ttm/ttm_bo.c dma_fence_enable_sw_signaling(fence); fence 467 drivers/gpu/drm/ttm/ttm_bo.c fence = 
rcu_dereference_protected(fobj->shared[i], fence 470 drivers/gpu/drm/ttm/ttm_bo.c if (!fence->ops->signaled) fence 471 drivers/gpu/drm/ttm/ttm_bo.c dma_fence_enable_sw_signaling(fence); fence 934 drivers/gpu/drm/ttm/ttm_bo.c struct dma_fence *fence; fence 938 drivers/gpu/drm/ttm/ttm_bo.c fence = dma_fence_get(man->move); fence 941 drivers/gpu/drm/ttm/ttm_bo.c if (!fence) fence 947 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_add_shared_fence(bo->base.resv, fence); fence 951 drivers/gpu/drm/ttm/ttm_bo.c dma_fence_put(fence); fence 956 drivers/gpu/drm/ttm/ttm_bo.c bo->moving = fence; fence 1495 drivers/gpu/drm/ttm/ttm_bo.c struct dma_fence *fence; fence 1517 drivers/gpu/drm/ttm/ttm_bo.c fence = dma_fence_get(man->move); fence 1520 drivers/gpu/drm/ttm/ttm_bo.c if (fence) { fence 1521 drivers/gpu/drm/ttm/ttm_bo.c ret = dma_fence_wait(fence, false); fence 1522 drivers/gpu/drm/ttm/ttm_bo.c dma_fence_put(fence); fence 682 drivers/gpu/drm/ttm/ttm_bo_util.c struct dma_fence *fence, fence 692 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(bo->base.resv, fence); fence 713 drivers/gpu/drm/ttm/ttm_bo_util.c bo->moving = dma_fence_get(fence); fence 719 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(ghost_obj->base.resv, fence); fence 744 drivers/gpu/drm/ttm/ttm_bo_util.c struct dma_fence *fence, bool evict, fence 755 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(bo->base.resv, fence); fence 769 drivers/gpu/drm/ttm/ttm_bo_util.c bo->moving = dma_fence_get(fence); fence 775 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(ghost_obj->base.resv, fence); fence 799 drivers/gpu/drm/ttm/ttm_bo_util.c if (!from->move || dma_fence_is_later(fence, from->move)) { fence 801 drivers/gpu/drm/ttm/ttm_bo_util.c from->move = dma_fence_get(fence); fence 808 drivers/gpu/drm/ttm/ttm_bo_util.c bo->moving = dma_fence_get(fence); fence 187 drivers/gpu/drm/ttm/ttm_execbuf_util.c struct dma_fence *fence) fence 204 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_add_shared_fence(bo->base.resv, fence); fence 206 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_add_excl_fence(bo->base.resv, fence); fence 166 drivers/gpu/drm/v3d/v3d_drv.h to_v3d_fence(struct dma_fence *fence) fence 168 drivers/gpu/drm/v3d/v3d_drv.h return (struct v3d_fence *)fence; fence 8 drivers/gpu/drm/v3d/v3d_fence.c struct v3d_fence *fence; fence 10 drivers/gpu/drm/v3d/v3d_fence.c fence = kzalloc(sizeof(*fence), GFP_KERNEL); fence 11 drivers/gpu/drm/v3d/v3d_fence.c if (!fence) fence 14 drivers/gpu/drm/v3d/v3d_fence.c fence->dev = &v3d->drm; fence 15 drivers/gpu/drm/v3d/v3d_fence.c fence->queue = queue; fence 16 drivers/gpu/drm/v3d/v3d_fence.c fence->seqno = ++v3d->queue[queue].emit_seqno; fence 17 drivers/gpu/drm/v3d/v3d_fence.c dma_fence_init(&fence->base, &v3d_fence_ops, &v3d->job_lock, fence 18 drivers/gpu/drm/v3d/v3d_fence.c v3d->queue[queue].fence_context, fence->seqno); fence 20 drivers/gpu/drm/v3d/v3d_fence.c return &fence->base; fence 23 drivers/gpu/drm/v3d/v3d_fence.c static const char *v3d_fence_get_driver_name(struct dma_fence *fence) fence 28 drivers/gpu/drm/v3d/v3d_fence.c static const char *v3d_fence_get_timeline_name(struct dma_fence *fence) fence 30 drivers/gpu/drm/v3d/v3d_fence.c struct v3d_fence *f = to_v3d_fence(fence); fence 356 drivers/gpu/drm/v3d/v3d_gem.c struct dma_fence *fence; fence 365 drivers/gpu/drm/v3d/v3d_gem.c xa_for_each(&job->deps, index, fence) { fence 366 drivers/gpu/drm/v3d/v3d_gem.c dma_fence_put(fence); fence 101 drivers/gpu/drm/v3d/v3d_irq.c struct v3d_fence *fence = fence 104 
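
v3d_fence.c (entries above) is close to the smallest possible dma_fence provider: one device-wide job_lock for every queue, plus a per-queue context and seqno. Reassembled from the entries above as a sketch:

        struct dma_fence *v3d_fence_create_sketch(struct v3d_dev *v3d,
                                                  enum v3d_queue queue)
        {
                struct v3d_fence *fence;

                fence = kzalloc(sizeof(*fence), GFP_KERNEL);
                if (!fence)
                        return ERR_PTR(-ENOMEM);

                fence->dev = &v3d->drm;
                fence->queue = queue;
                fence->seqno = ++v3d->queue[queue].emit_seqno;
                dma_fence_init(&fence->base, &v3d_fence_ops, &v3d->job_lock,
                               v3d->queue[queue].fence_context, fence->seqno);

                return &fence->base;
        }

The matching v3d_irq.c handlers that follow simply look up the active fence for each queue, trace it, and call dma_fence_signal(&fence->base).
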
drivers/gpu/drm/v3d/v3d_irq.c trace_v3d_bcl_irq(&v3d->drm, fence->seqno); fence 105 drivers/gpu/drm/v3d/v3d_irq.c dma_fence_signal(&fence->base); fence 110 drivers/gpu/drm/v3d/v3d_irq.c struct v3d_fence *fence = fence 113 drivers/gpu/drm/v3d/v3d_irq.c trace_v3d_rcl_irq(&v3d->drm, fence->seqno); fence 114 drivers/gpu/drm/v3d/v3d_irq.c dma_fence_signal(&fence->base); fence 119 drivers/gpu/drm/v3d/v3d_irq.c struct v3d_fence *fence = fence 122 drivers/gpu/drm/v3d/v3d_irq.c trace_v3d_csd_irq(&v3d->drm, fence->seqno); fence 123 drivers/gpu/drm/v3d/v3d_irq.c dma_fence_signal(&fence->base); fence 155 drivers/gpu/drm/v3d/v3d_irq.c struct v3d_fence *fence = fence 158 drivers/gpu/drm/v3d/v3d_irq.c trace_v3d_tfu_irq(&v3d->drm, fence->seqno); fence 159 drivers/gpu/drm/v3d/v3d_irq.c dma_fence_signal(&fence->base); fence 93 drivers/gpu/drm/v3d/v3d_sched.c struct dma_fence *fence; fence 112 drivers/gpu/drm/v3d/v3d_sched.c fence = v3d_fence_create(v3d, V3D_BIN); fence 113 drivers/gpu/drm/v3d/v3d_sched.c if (IS_ERR(fence)) fence 118 drivers/gpu/drm/v3d/v3d_sched.c job->base.irq_fence = dma_fence_get(fence); fence 120 drivers/gpu/drm/v3d/v3d_sched.c trace_v3d_submit_cl(dev, false, to_v3d_fence(fence)->seqno, fence 138 drivers/gpu/drm/v3d/v3d_sched.c return fence; fence 146 drivers/gpu/drm/v3d/v3d_sched.c struct dma_fence *fence; fence 161 drivers/gpu/drm/v3d/v3d_sched.c fence = v3d_fence_create(v3d, V3D_RENDER); fence 162 drivers/gpu/drm/v3d/v3d_sched.c if (IS_ERR(fence)) fence 167 drivers/gpu/drm/v3d/v3d_sched.c job->base.irq_fence = dma_fence_get(fence); fence 169 drivers/gpu/drm/v3d/v3d_sched.c trace_v3d_submit_cl(dev, true, to_v3d_fence(fence)->seqno, fence 180 drivers/gpu/drm/v3d/v3d_sched.c return fence; fence 189 drivers/gpu/drm/v3d/v3d_sched.c struct dma_fence *fence; fence 191 drivers/gpu/drm/v3d/v3d_sched.c fence = v3d_fence_create(v3d, V3D_TFU); fence 192 drivers/gpu/drm/v3d/v3d_sched.c if (IS_ERR(fence)) fence 198 drivers/gpu/drm/v3d/v3d_sched.c job->base.irq_fence = dma_fence_get(fence); fence 200 drivers/gpu/drm/v3d/v3d_sched.c trace_v3d_submit_tfu(dev, to_v3d_fence(fence)->seqno); fence 217 drivers/gpu/drm/v3d/v3d_sched.c return fence; fence 226 drivers/gpu/drm/v3d/v3d_sched.c struct dma_fence *fence; fence 233 drivers/gpu/drm/v3d/v3d_sched.c fence = v3d_fence_create(v3d, V3D_CSD); fence 234 drivers/gpu/drm/v3d/v3d_sched.c if (IS_ERR(fence)) fence 239 drivers/gpu/drm/v3d/v3d_sched.c job->base.irq_fence = dma_fence_get(fence); fence 241 drivers/gpu/drm/v3d/v3d_sched.c trace_v3d_submit_csd(dev, to_v3d_fence(fence)->seqno); fence 248 drivers/gpu/drm/v3d/v3d_sched.c return fence; fence 297 drivers/gpu/drm/vc4/vc4_drv.h to_vc4_fence(struct dma_fence *fence) fence 299 drivers/gpu/drm/vc4/vc4_drv.h return (struct vc4_fence *)fence; fence 492 drivers/gpu/drm/vc4/vc4_drv.h struct dma_fence *fence; fence 26 drivers/gpu/drm/vc4/vc4_fence.c static const char *vc4_fence_get_driver_name(struct dma_fence *fence) fence 31 drivers/gpu/drm/vc4/vc4_fence.c static const char *vc4_fence_get_timeline_name(struct dma_fence *fence) fence 36 drivers/gpu/drm/vc4/vc4_fence.c static bool vc4_fence_signaled(struct dma_fence *fence) fence 38 drivers/gpu/drm/vc4/vc4_fence.c struct vc4_fence *f = to_vc4_fence(fence); fence 546 drivers/gpu/drm/vc4/vc4_gem.c dma_resv_add_shared_fence(bo->base.base.resv, exec->fence); fence 557 drivers/gpu/drm/vc4/vc4_gem.c dma_resv_add_excl_fence(bo->base.base.resv, exec->fence); fence 673 drivers/gpu/drm/vc4/vc4_gem.c struct vc4_fence *fence; fence 675 drivers/gpu/drm/vc4/vc4_gem.c fence = 
kzalloc(sizeof(*fence), GFP_KERNEL); fence 676 drivers/gpu/drm/vc4/vc4_gem.c if (!fence) fence 678 drivers/gpu/drm/vc4/vc4_gem.c fence->dev = dev; fence 685 drivers/gpu/drm/vc4/vc4_gem.c dma_fence_init(&fence->base, &vc4_fence_ops, &vc4->job_lock, fence 687 drivers/gpu/drm/vc4/vc4_gem.c fence->seqno = exec->seqno; fence 688 drivers/gpu/drm/vc4/vc4_gem.c exec->fence = &fence->base; fence 691 drivers/gpu/drm/vc4/vc4_gem.c drm_syncobj_replace_fence(out_sync, exec->fence); fence 951 drivers/gpu/drm/vc4/vc4_gem.c if (exec->fence) { fence 952 drivers/gpu/drm/vc4/vc4_gem.c dma_fence_signal(exec->fence); fence 953 drivers/gpu/drm/vc4/vc4_gem.c dma_fence_put(exec->fence); fence 185 drivers/gpu/drm/vc4/vc4_irq.c if (exec->fence) { fence 186 drivers/gpu/drm/vc4/vc4_irq.c dma_fence_signal_locked(exec->fence); fence 187 drivers/gpu/drm/vc4/vc4_irq.c dma_fence_put(exec->fence); fence 188 drivers/gpu/drm/vc4/vc4_irq.c exec->fence = NULL; fence 38 drivers/gpu/drm/vgem/vgem_fence.c static const char *vgem_fence_get_driver_name(struct dma_fence *fence) fence 43 drivers/gpu/drm/vgem/vgem_fence.c static const char *vgem_fence_get_timeline_name(struct dma_fence *fence) fence 50 drivers/gpu/drm/vgem/vgem_fence.c struct vgem_fence *fence = container_of(base, typeof(*fence), base); fence 52 drivers/gpu/drm/vgem/vgem_fence.c del_timer_sync(&fence->timer); fence 53 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_free(&fence->base); fence 56 drivers/gpu/drm/vgem/vgem_fence.c static void vgem_fence_value_str(struct dma_fence *fence, char *str, int size) fence 58 drivers/gpu/drm/vgem/vgem_fence.c snprintf(str, size, "%llu", fence->seqno); fence 61 drivers/gpu/drm/vgem/vgem_fence.c static void vgem_fence_timeline_value_str(struct dma_fence *fence, char *str, fence 65 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_is_signaled(fence) ? 
fence->seqno : 0); fence 79 drivers/gpu/drm/vgem/vgem_fence.c struct vgem_fence *fence = from_timer(fence, t, timer); fence 81 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_signal(&fence->base); fence 87 drivers/gpu/drm/vgem/vgem_fence.c struct vgem_fence *fence; fence 89 drivers/gpu/drm/vgem/vgem_fence.c fence = kzalloc(sizeof(*fence), GFP_KERNEL); fence 90 drivers/gpu/drm/vgem/vgem_fence.c if (!fence) fence 93 drivers/gpu/drm/vgem/vgem_fence.c spin_lock_init(&fence->lock); fence 94 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_init(&fence->base, &vgem_fence_ops, &fence->lock, fence 97 drivers/gpu/drm/vgem/vgem_fence.c timer_setup(&fence->timer, vgem_fence_timeout, 0); fence 100 drivers/gpu/drm/vgem/vgem_fence.c mod_timer(&fence->timer, jiffies + VGEM_FENCE_TIMEOUT); fence 102 drivers/gpu/drm/vgem/vgem_fence.c return &fence->base; fence 133 drivers/gpu/drm/vgem/vgem_fence.c struct dma_fence *fence; fence 146 drivers/gpu/drm/vgem/vgem_fence.c fence = vgem_fence_create(vfile, arg->flags); fence 147 drivers/gpu/drm/vgem/vgem_fence.c if (!fence) { fence 164 drivers/gpu/drm/vgem/vgem_fence.c dma_resv_add_excl_fence(resv, fence); fence 166 drivers/gpu/drm/vgem/vgem_fence.c dma_resv_add_shared_fence(resv, fence); fence 172 drivers/gpu/drm/vgem/vgem_fence.c ret = idr_alloc(&vfile->fence_idr, fence, 1, 0, GFP_KERNEL); fence 181 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_signal(fence); fence 182 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_put(fence); fence 211 drivers/gpu/drm/vgem/vgem_fence.c struct dma_fence *fence; fence 218 drivers/gpu/drm/vgem/vgem_fence.c fence = idr_replace(&vfile->fence_idr, NULL, arg->fence); fence 220 drivers/gpu/drm/vgem/vgem_fence.c if (!fence) fence 222 drivers/gpu/drm/vgem/vgem_fence.c if (IS_ERR(fence)) fence 223 drivers/gpu/drm/vgem/vgem_fence.c return PTR_ERR(fence); fence 225 drivers/gpu/drm/vgem/vgem_fence.c if (dma_fence_is_signaled(fence)) fence 228 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_signal(fence); fence 229 drivers/gpu/drm/vgem/vgem_fence.c dma_fence_put(fence); fence 145 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence; fence 246 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 260 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 268 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 279 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 304 drivers/gpu/drm/virtio/virtgpu_drv.h uint32_t ctx_id, struct virtio_gpu_fence *fence); fence 309 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 315 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 320 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 353 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 361 drivers/gpu/drm/virtio/virtgpu_drv.h struct virtio_gpu_fence *fence); fence 42 drivers/gpu/drm/virtio/virtgpu_fence.c struct virtio_gpu_fence *fence = to_virtio_fence(f); fence 44 drivers/gpu/drm/virtio/virtgpu_fence.c if (atomic64_read(&fence->drv->last_seq) >= fence->f.seqno) fence 56 drivers/gpu/drm/virtio/virtgpu_fence.c struct virtio_gpu_fence *fence = to_virtio_fence(f); fence 58 drivers/gpu/drm/virtio/virtgpu_fence.c snprintf(str, size, "%llu", (u64)atomic64_read(&fence->drv->last_seq)); fence 72 drivers/gpu/drm/virtio/virtgpu_fence.c struct virtio_gpu_fence *fence = kzalloc(sizeof(struct virtio_gpu_fence), fence 74 drivers/gpu/drm/virtio/virtgpu_fence.c if (!fence) fence 75 
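
vgem_fence.c (entries above) implements a pure software fence whose only guaranteed signaling source is a timer, so a forgotten fence always expires instead of stalling the kernel. A sketch from those entries; the dma_fence_init() context and seqno arguments are not visible in the index and are filled in as plausible values:

        static void vgem_fence_timeout_sketch(struct timer_list *t)
        {
                struct vgem_fence *fence = from_timer(fence, t, timer);

                dma_fence_signal(&fence->base);
        }

        static struct dma_fence *vgem_fence_create_sketch(void)
        {
                struct vgem_fence *fence;

                fence = kzalloc(sizeof(*fence), GFP_KERNEL);
                if (!fence)
                        return NULL;

                spin_lock_init(&fence->lock);
                dma_fence_init(&fence->base, &vgem_fence_ops, &fence->lock,
                               dma_fence_context_alloc(1), 1); /* assumed */
                timer_setup(&fence->timer, vgem_fence_timeout_sketch, 0);

                /* fail-safe: signal no matter what after the timeout */
                mod_timer(&fence->timer, jiffies + VGEM_FENCE_TIMEOUT);
                return &fence->base;
        }
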
drivers/gpu/drm/virtio/virtgpu_fence.c return fence; fence 77 drivers/gpu/drm/virtio/virtgpu_fence.c fence->drv = drv; fence 83 drivers/gpu/drm/virtio/virtgpu_fence.c dma_fence_init(&fence->f, &virtio_fence_ops, &drv->lock, drv->context, 0); fence 85 drivers/gpu/drm/virtio/virtgpu_fence.c return fence; fence 90 drivers/gpu/drm/virtio/virtgpu_fence.c struct virtio_gpu_fence *fence) fence 96 drivers/gpu/drm/virtio/virtgpu_fence.c fence->f.seqno = ++drv->sync_seq; fence 97 drivers/gpu/drm/virtio/virtgpu_fence.c dma_fence_get(&fence->f); fence 98 drivers/gpu/drm/virtio/virtgpu_fence.c list_add_tail(&fence->node, &drv->fences); fence 101 drivers/gpu/drm/virtio/virtgpu_fence.c trace_dma_fence_emit(&fence->f); fence 104 drivers/gpu/drm/virtio/virtgpu_fence.c cmd_hdr->fence_id = cpu_to_le64(fence->f.seqno); fence 111 drivers/gpu/drm/virtio/virtgpu_fence.c struct virtio_gpu_fence *fence, *tmp; fence 116 drivers/gpu/drm/virtio/virtgpu_fence.c list_for_each_entry_safe(fence, tmp, &drv->fences, node) { fence 117 drivers/gpu/drm/virtio/virtgpu_fence.c if (last_seq < fence->f.seqno) fence 119 drivers/gpu/drm/virtio/virtgpu_fence.c dma_fence_signal_locked(&fence->f); fence 120 drivers/gpu/drm/virtio/virtgpu_fence.c list_del(&fence->node); fence 121 drivers/gpu/drm/virtio/virtgpu_fence.c dma_fence_put(&fence->f); fence 42 drivers/gpu/drm/virtio/virtgpu_gem.c struct virtio_gpu_fence *fence) fence 48 drivers/gpu/drm/virtio/virtgpu_gem.c ret = virtio_gpu_object_create(vgdev, params, &obj, fence); fence 278 drivers/gpu/drm/virtio/virtgpu_ioctl.c struct virtio_gpu_fence *fence; fence 316 drivers/gpu/drm/virtio/virtgpu_ioctl.c fence = virtio_gpu_fence_alloc(vgdev); fence 317 drivers/gpu/drm/virtio/virtgpu_ioctl.c if (!fence) fence 319 drivers/gpu/drm/virtio/virtgpu_ioctl.c qobj = virtio_gpu_alloc_object(dev, &params, fence); fence 320 drivers/gpu/drm/virtio/virtgpu_ioctl.c dma_fence_put(&fence->f); fence 366 drivers/gpu/drm/virtio/virtgpu_ioctl.c struct virtio_gpu_fence *fence; fence 390 drivers/gpu/drm/virtio/virtgpu_ioctl.c fence = virtio_gpu_fence_alloc(vgdev); fence 391 drivers/gpu/drm/virtio/virtgpu_ioctl.c if (!fence) { fence 398 drivers/gpu/drm/virtio/virtgpu_ioctl.c &box, fence); fence 400 drivers/gpu/drm/virtio/virtgpu_ioctl.c &fence->f); fence 402 drivers/gpu/drm/virtio/virtgpu_ioctl.c dma_fence_put(&fence->f); fence 419 drivers/gpu/drm/virtio/virtgpu_ioctl.c struct virtio_gpu_fence *fence; fence 444 drivers/gpu/drm/virtio/virtgpu_ioctl.c fence = virtio_gpu_fence_alloc(vgdev); fence 445 drivers/gpu/drm/virtio/virtgpu_ioctl.c if (!fence) { fence 452 drivers/gpu/drm/virtio/virtgpu_ioctl.c args->level, &box, fence); fence 454 drivers/gpu/drm/virtio/virtgpu_ioctl.c &fence->f); fence 455 drivers/gpu/drm/virtio/virtgpu_ioctl.c dma_fence_put(&fence->f); fence 104 drivers/gpu/drm/virtio/virtgpu_object.c struct virtio_gpu_fence *fence) fence 133 drivers/gpu/drm/virtio/virtgpu_object.c virtio_gpu_cmd_resource_create_3d(vgdev, bo, params, fence); fence 135 drivers/gpu/drm/virtio/virtgpu_object.c virtio_gpu_cmd_create_resource(vgdev, bo, params, fence); fence 147 drivers/gpu/drm/virtio/virtgpu_object.c if (fence) { fence 166 drivers/gpu/drm/virtio/virtgpu_object.c signaled = virtio_fence_signaled(&fence->f); fence 170 drivers/gpu/drm/virtio/virtgpu_object.c &fence->f); fence 157 drivers/gpu/drm/virtio/virtgpu_plane.c vgfb->fence = virtio_gpu_fence_alloc(vgdev); fence 158 drivers/gpu/drm/virtio/virtgpu_plane.c if (!vgfb->fence) fence 174 drivers/gpu/drm/virtio/virtgpu_plane.c if (vgfb->fence) { fence 175
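
The virtgpu_fence.c entries above pair an emit side with a retire side: seqnos are handed out in order under the driver lock, and once the host reports last_seq, everything at or below it is signalled. A sketch of the emit half, assuming the fence driver lives at vgdev->fence_drv as the surrounding entries suggest:

        static void virtio_gpu_fence_emit_sketch(
                        struct virtio_gpu_device *vgdev,
                        struct virtio_gpu_ctrl_hdr *cmd_hdr,
                        struct virtio_gpu_fence *fence)
        {
                struct virtio_gpu_fence_driver *drv = &vgdev->fence_drv;
                unsigned long irq_flags;

                spin_lock_irqsave(&drv->lock, irq_flags);
                fence->f.seqno = ++drv->sync_seq; /* handed out in order */
                dma_fence_get(&fence->f);         /* ref held until retired */
                list_add_tail(&fence->node, &drv->fences);
                spin_unlock_irqrestore(&drv->lock, irq_flags);

                cmd_hdr->fence_id = cpu_to_le64(fence->f.seqno);
        }

The retire path (fence 111 through 121 above) walks drv->fences with list_for_each_entry_safe(), signals and unlinks every fence whose seqno is not beyond last_seq, and drops the emit-time reference.
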
drivers/gpu/drm/virtio/virtgpu_plane.c dma_fence_put(&vgfb->fence->f); fence 176 drivers/gpu/drm/virtio/virtgpu_plane.c vgfb->fence = NULL; fence 212 drivers/gpu/drm/virtio/virtgpu_plane.c 0, 0, vgfb->fence); fence 216 drivers/gpu/drm/virtio/virtgpu_plane.c &vgfb->fence->f); fence 217 drivers/gpu/drm/virtio/virtgpu_plane.c dma_fence_put(&vgfb->fence->f); fence 218 drivers/gpu/drm/virtio/virtgpu_plane.c vgfb->fence = NULL; fence 317 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 339 drivers/gpu/drm/virtio/virtgpu_vq.c if (fence) fence 340 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_fence_emit(vgdev, hdr, fence); fence 391 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 405 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 426 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 437 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 488 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 510 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 518 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 533 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 864 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 886 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 895 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 916 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 923 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 938 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 943 drivers/gpu/drm/virtio/virtgpu_vq.c uint32_t ctx_id, struct virtio_gpu_fence *fence) fence 958 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); fence 963 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence) fence 1008 drivers/gpu/drm/virtio/virtgpu_vq.c fence); fence 1018 drivers/gpu/drm/virtio/virtgpu_vq.c struct virtio_gpu_fence *fence = virtio_gpu_fence_alloc(vgdev); fence 1020 drivers/gpu/drm/virtio/virtgpu_vq.c virtio_gpu_cmd_resource_inval_backing(vgdev, obj->hw_res_handle, fence); fence 1021 drivers/gpu/drm/virtio/virtgpu_vq.c dma_fence_wait(&fence->f, true); fence 1022 drivers/gpu/drm/virtio/virtgpu_vq.c dma_fence_put(&fence->f); fence 1002 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c struct vmw_fence_obj *fence) fence 1009 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c if (fence == NULL) { fence 1010 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); fence 1011 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c dma_resv_add_excl_fence(bo->base.resv, &fence->base); fence 1012 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c dma_fence_put(&fence->base); fence 1014 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c dma_resv_add_excl_fence(bo->base.resv, &fence->base); fence 367 drivers/gpu/drm/vmwgfx/vmwgfx_context.c struct vmw_fence_obj *fence; fence 417 drivers/gpu/drm/vmwgfx/vmwgfx_context.c &fence, NULL); fence 419 drivers/gpu/drm/vmwgfx/vmwgfx_context.c vmw_bo_fence_single(bo, fence); fence 421 drivers/gpu/drm/vmwgfx/vmwgfx_context.c if (likely(fence != 
NULL)) fence 422 drivers/gpu/drm/vmwgfx/vmwgfx_context.c vmw_fence_obj_unreference(&fence); fence 574 drivers/gpu/drm/vmwgfx/vmwgfx_context.c struct vmw_fence_obj *fence; fence 631 drivers/gpu/drm/vmwgfx/vmwgfx_context.c &fence, NULL); fence 633 drivers/gpu/drm/vmwgfx/vmwgfx_context.c vmw_bo_fence_single(bo, fence); fence 635 drivers/gpu/drm/vmwgfx/vmwgfx_context.c if (likely(fence != NULL)) fence 636 drivers/gpu/drm/vmwgfx/vmwgfx_context.c vmw_fence_obj_unreference(&fence); fence 310 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c struct vmw_fence_obj *fence; fence 322 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); fence 323 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c vmw_bo_fence_single(bo, fence); fence 324 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c if (likely(fence != NULL)) fence 325 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c vmw_fence_obj_unreference(&fence); fence 347 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c struct vmw_fence_obj *fence; fence 362 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL); fence 363 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c vmw_bo_fence_single(&res->backup->base, fence); fence 364 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c vmw_fence_obj_unreference(&fence); fence 790 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h struct vmw_fence_obj *fence); fence 998 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h struct vmw_fence_obj *fence); fence 1010 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h struct vmw_fence_obj *fence, fence 3420 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c struct vmw_fence_obj *fence, uint32_t fence_handle, fence 3433 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c BUG_ON(fence == NULL); fence 3436 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c fence_rep.seqno = fence->base.seqno; fence 3465 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c (void) vmw_fence_obj_wait(fence, false, false, fence 3639 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c struct vmw_fence_obj *fence = NULL; fence 3775 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c ret = vmw_execbuf_fence_commands(file_priv, dev_priv, &fence, fence 3789 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c vmw_validation_bo_fence(sw_context->ctx, fence); fence 3792 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c __vmw_execbuf_release_pinned_bo(dev_priv, fence); fence 3801 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c sync_file = sync_file_create(&fence->base); fence 3807 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c (void) vmw_fence_obj_wait(fence, false, false, fence 3816 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c user_fence_rep, fence, handle, out_fence_fd, fence 3821 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c *out_fence = fence; fence 3822 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c fence = NULL; fence 3823 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c } else if (likely(fence != NULL)) { fence 3824 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c vmw_fence_obj_unreference(&fence); fence 3916 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c struct vmw_fence_obj *fence) fence 3940 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c BUG_ON(fence != NULL); fence 3952 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c if (fence == NULL) { fence 3955 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c fence = lfence; fence 3957 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c vmw_validation_bo_fence(&val_ctx, fence); fence 55 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj fence; fence 77 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence; fence 85 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fman_from_fence(struct vmw_fence_obj *fence) fence 87 
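
The vmwgfx_bo.c entries further up (fence 1002 through 1014) show the "fence single" idiom used throughout the vmwgfx hits here: if the caller has no fence, mint one at the current command-stream position, attach it exclusively, and drop the creation reference. A sketch with dev_priv passed explicitly for self-containment (the real function derives it from the BO):

        static void vmw_bo_fence_single_sketch(struct ttm_buffer_object *bo,
                                               struct vmw_fence_obj *fence,
                                               struct vmw_private *dev_priv)
        {
                if (fence == NULL) {
                        /* no fence supplied: create one at the current
                         * command-stream position, then drop our ref */
                        vmw_execbuf_fence_commands(NULL, dev_priv,
                                                   &fence, NULL);
                        dma_resv_add_excl_fence(bo->base.resv, &fence->base);
                        dma_fence_put(&fence->base);
                } else {
                        dma_resv_add_excl_fence(bo->base.resv, &fence->base);
                }
        }
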
drivers/gpu/drm/vmwgfx/vmwgfx_fence.c return container_of(fence->base.lock, struct vmw_fence_manager, lock); fence 114 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence = fence 117 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence); fence 120 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_del_init(&fence->head); fence 123 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence->destroy(fence); fence 138 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence = fence 141 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence); fence 146 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (seqno - fence->base.seqno < VMW_FENCE_WRAP) fence 160 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmwgfx_wait_cb(struct dma_fence *fence, struct dma_fence_cb *cb) fence 172 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence = fence 175 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence); fence 180 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (likely(vmw_fence_obj_signaled(fence))) fence 340 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence, u32 seqno, fence 341 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c void (*destroy) (struct vmw_fence_obj *fence)) fence 345 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c dma_fence_init(&fence->base, &vmw_fence_ops, &fman->lock, fence 347 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c INIT_LIST_HEAD(&fence->seq_passed_actions); fence 348 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence->destroy = destroy; fence 355 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_add_tail(&fence->head, &fman->fence_list); fence 405 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence; fence 416 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_for_each_entry(fence, &fman->fence_list, head) { fence 417 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (!list_empty(&fence->seq_passed_actions)) { fence 419 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_mmio_write(fence->base.seqno, fence 444 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c static bool vmw_fence_goal_check_locked(struct vmw_fence_obj *fence) fence 446 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence); fence 450 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (dma_fence_is_signaled_locked(&fence->base)) fence 456 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c goal_seqno - fence->base.seqno < VMW_FENCE_WRAP)) fence 459 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_mmio_write(fence->base.seqno, fifo_mem + SVGA_FIFO_FENCE_GOAL); fence 467 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence, *next_fence; fence 475 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_for_each_entry_safe(fence, next_fence, &fman->fence_list, head) { fence 476 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (seqno - fence->base.seqno < VMW_FENCE_WRAP) { fence 477 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_del_init(&fence->head); fence 478 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c dma_fence_signal_locked(&fence->base); fence 480 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_splice_init(&fence->seq_passed_actions, fence 513 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c bool vmw_fence_obj_signaled(struct vmw_fence_obj *fence) fence 515 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence); fence 517 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->base.flags)) fence 522 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c return dma_fence_is_signaled(&fence->base); fence 525 
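
The fman_from_fence() entry above (fence 85-87) is worth calling out as a design choice: every vmwgfx fence is initialised with the fence manager's lock, so the lock pointer embedded in the dma_fence doubles as a back-pointer to the manager. Verbatim from the entries above, as a sketch:

        static struct vmw_fence_manager *
        fman_from_fence_sketch(struct vmw_fence_obj *fence)
        {
                return container_of(fence->base.lock,
                                    struct vmw_fence_manager, lock);
        }

One irq-safe lock then covers the manager's fence_list, and batch updates such as the vmw_fences_update() path seen above can use dma_fence_signal_locked() on many fences without per-fence locking.
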
fence 528 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c long ret = dma_fence_wait_timeout(&fence->base, interruptible, timeout);
fence 538 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c void vmw_fence_obj_flush(struct vmw_fence_obj *fence)
fence 540 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_private *dev_priv = fman_from_fence(fence)->dev_priv;
fence 545 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c static void vmw_fence_destroy(struct vmw_fence_obj *fence)
fence 547 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c dma_fence_free(&fence->base);
fence 554 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence;
fence 557 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence = kzalloc(sizeof(*fence), GFP_KERNEL);
fence 558 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (unlikely(!fence))
fence 561 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_fence_obj_init(fman, fence, seqno,
fence 566 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c *p_fence = fence;
fence 570 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c kfree(fence);
fence 575 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c static void vmw_user_fence_destroy(struct vmw_fence_obj *fence)
fence 578 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c container_of(fence, struct vmw_user_fence, fence);
fence 579 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence);
fence 594 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence = &ufence->fence;
fence 597 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_fence_obj_unreference(&fence);
fence 632 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_fence_obj_init(fman, &ufence->fence, seqno,
fence 643 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c tmp = vmw_fence_obj_reference(&ufence->fence);
fence 657 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c *p_fence = &ufence->fence;
fence 662 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c tmp = &ufence->fence;
fence 680 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct dma_fence *fence)
fence 687 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (dma_fence_is_signaled(fence))
fence 690 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (!dma_fence_is_array(fence))
fence 691 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c return dma_fence_wait(fence, true);
fence 701 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence_array = to_dma_fence_array(fence);
fence 732 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence =
fence 735 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c dma_fence_get(&fence->base);
fence 738 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_fence_obj_wait(fence, false, false,
fence 742 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_del_init(&fence->head);
fence 743 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c dma_fence_signal(&fence->base);
fence 745 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_splice_init(&fence->seq_passed_actions,
fence 750 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c BUG_ON(!list_empty(&fence->head));
fence 751 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c dma_fence_put(&fence->base);
fence 807 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence;
fence 829 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence = &(container_of(base, struct vmw_user_fence, base)->fence);
fence 833 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = ((vmw_fence_obj_signaled(fence)) ?
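vmw_fence_obj_wait() above wraps dma_fence_wait_timeout(), whose return convention is: a negative errno on error, 0 if the timeout expired, otherwise the remaining timeout in jiffies. A sketch of the usual conversion to a plain int error code (the function name is illustrative, not the driver's):

#include <linux/dma-fence.h>
#include <linux/errno.h>

static int wait_fence_int(struct dma_fence *f, bool intr, unsigned long timeout)
{
	long ret = dma_fence_wait_timeout(f, intr, timeout);

	if (ret < 0)
		return ret;	/* e.g. -ERESTARTSYS when interrupted */
	if (ret == 0)
		return -EBUSY;	/* timed out, fence still unsignaled */
	return 0;		/* signaled with time to spare */
}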
fence 840 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_fence_obj_wait(fence, arg->lazy, true, timeout);
fence 861 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence;
fence 870 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence = &(container_of(base, struct vmw_user_fence, base)->fence);
fence 871 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fman = fman_from_fence(fence);
fence 873 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c arg->signaled = vmw_fence_obj_signaled(fence);
fence 947 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_fence_obj_unreference(&eaction->fence);
fence 961 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c static void vmw_fence_obj_add_action(struct vmw_fence_obj *fence,
fence 964 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence);
fence 971 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (dma_fence_is_signaled_locked(&fence->base)) {
fence 978 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c list_add_tail(&action->head, &fence->seq_passed_actions);
fence 984 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c run_update = vmw_fence_goal_check_locked(fence);
fence 1016 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence,
fence 1023 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence);
fence 1035 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c eaction->fence = vmw_fence_obj_reference(fence);
fence 1040 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_fence_obj_add_action(fence, &eaction->action);
fence 1051 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence,
fence 1057 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_manager *fman = fman_from_fence(fence);
fence 1081 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_event_fence_action_queue(file_priv, fence,
fence 1087 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_event_fence_action_queue(file_priv, fence,
fence 1109 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c struct vmw_fence_obj *fence = NULL;
fence 1130 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c fence = &(container_of(base, struct vmw_user_fence,
fence 1131 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c base)->fence);
fence 1132 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c (void) vmw_fence_obj_reference(fence);
fence 1150 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c if (!fence) {
fence 1152 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c &fence,
fence 1161 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c BUG_ON(fence == NULL);
fence 1163 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c ret = vmw_event_fence_action_create(file_priv, fence,
fence 1173 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_execbuf_copy_fence_user(dev_priv, vmw_fp, 0, user_fence_rep, fence,
fence 1175 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_fence_obj_unreference(&fence);
fence 1181 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c vmw_fence_obj_unreference(&fence);
fence 63 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h void (*destroy)(struct vmw_fence_obj *fence);
fence 74 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h struct vmw_fence_obj *fence = *fence_p;
fence 77 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h if (fence)
fence 78 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h dma_fence_put(&fence->base);
fence 82 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h vmw_fence_obj_reference(struct vmw_fence_obj *fence)
fence 84 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h if (fence)
fence 85 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h dma_fence_get(&fence->base);
fence 86 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h return fence;
fence 91 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h extern bool vmw_fence_obj_signaled(struct vmw_fence_obj *fence);
fence 93 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h extern int vmw_fence_obj_wait(struct vmw_fence_obj *fence,
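vmw_user_fence above embeds vmw_fence_obj, which in turn embeds struct dma_fence, and the driver recovers its own type with container_of(). A sketch of that embedding pattern with invented names (struct demo_fence is not a kernel type):

#include <linux/dma-fence.h>

struct demo_fence {
	struct dma_fence base;	/* must be embedded, never pointed to */
	const char *label;
};

static const char *demo_get_driver_name(struct dma_fence *fence)
{
	return "demo";
}

static const char *demo_get_timeline_name(struct dma_fence *fence)
{
	struct demo_fence *df = container_of(fence, struct demo_fence, base);

	return df->label;	/* per-fence data recovered from the base */
}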
fence 97 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h extern void vmw_fence_obj_flush(struct vmw_fence_obj *fence);
fence 110 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h struct dma_fence *fence);
fence 127 drivers/gpu/drm/vmwgfx/vmwgfx_fence.h struct vmw_fence_obj *fence,
fence 571 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c cmd_fence->fence = *seqno;
fence 2561 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c struct vmw_fence_obj *fence = NULL;
fence 2567 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c ret = vmw_execbuf_fence_commands(file_priv, dev_priv, &fence,
fence 2569 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c vmw_validation_done(ctx, fence);
fence 2572 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c ret, user_fence_rep, fence,
fence 2575 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c *out_fence = fence;
fence 2577 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c vmw_fence_obj_unreference(&fence);
fence 47 drivers/gpu/drm/vmwgfx/vmwgfx_reg.h u32 fence;
fence 809 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c struct vmw_fence_obj *fence;
fence 815 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL);
fence 816 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c vmw_bo_fence_single(bo, fence);
fence 818 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c if (fence != NULL)
fence 819 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c vmw_fence_obj_unreference(&fence);
fence 732 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c struct vmw_fence_obj *fence = NULL;
fence 743 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c old_state, vfb, &fence);
fence 747 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c &fence);
fence 757 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c if (event && fence) {
fence 761 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c fence,
fence 773 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c if (fence)
fence 774 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c vmw_fence_obj_unreference(&fence);
fence 283 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c struct vmw_fence_obj *fence;
fence 303 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c &fence, NULL);
fence 305 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c vmw_bo_fence_single(val_buf->bo, fence);
fence 307 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c if (likely(fence != NULL))
fence 308 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c vmw_fence_obj_unreference(&fence);
fence 513 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c struct vmw_fence_obj *fence;
fence 526 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c &fence, NULL);
fence 527 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c vmw_bo_fence_single(val_buf->bo, fence);
fence 529 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c if (likely(fence != NULL))
fence 530 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c vmw_fence_obj_unreference(&fence);
fence 1596 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c struct vmw_fence_obj *fence = NULL;
fence 1617 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c old_state, vfb, &fence);
fence 1621 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c &fence);
fence 1646 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c if (event && fence) {
fence 1650 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c fence,
fence 1661 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c if (fence)
fence 1662 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c vmw_fence_obj_unreference(&fence);
fence 462 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c struct vmw_fence_obj *fence;
fence 484 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c &fence, NULL);
fence 486 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c vmw_bo_fence_single(val_buf->bo, fence);
fence 488 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c if (likely(fence != NULL))
fence 489 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c vmw_fence_obj_unreference(&fence);
fence 1184 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c struct vmw_fence_obj *fence;
fence 1235 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c &fence, NULL);
fence 1237 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c vmw_bo_fence_single(val_buf->bo, fence);
fence 1239 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c if (likely(fence != NULL))
fence 1240 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c vmw_fence_obj_unreference(&fence);
fence 776 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c struct vmw_fence_obj *fence)
fence 778 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c vmw_validation_bo_fence(ctx, fence);
fence 200 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h struct vmw_fence_obj *fence)
fence 203 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h (void *) fence);
fence 263 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h struct vmw_fence_obj *fence);
fence 85 drivers/gpu/host1x/cdma.c pb->fence = pb->size - 8;
fence 142 drivers/gpu/host1x/cdma.c WARN_ON(pb->pos == pb->fence);
fence 158 drivers/gpu/host1x/cdma.c pb->fence += slots * 8;
fence 160 drivers/gpu/host1x/cdma.c if (pb->fence >= pb->size)
fence 161 drivers/gpu/host1x/cdma.c pb->fence -= pb->size;
fence 169 drivers/gpu/host1x/cdma.c unsigned int fence = pb->fence;
fence 171 drivers/gpu/host1x/cdma.c if (pb->fence < pb->pos)
fence 172 drivers/gpu/host1x/cdma.c fence += pb->size;
fence 174 drivers/gpu/host1x/cdma.c return (fence - pb->pos) / 8;
fence 37 drivers/gpu/host1x/cdma.h u32 fence; /* index we've written */
fence 407 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_re_fence_data *fence = &pd->fence;
fence 408 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct ib_mr *ib_mr = &fence->mr->ib_mr;
fence 409 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_qplib_swqe *wqe = &fence->bind_wqe;
fence 418 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->bind.va = (u64)(unsigned long)fence->va;
fence 419 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->bind.length = fence->size;
fence 426 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->bind_rkey = ib_inc_rkey(fence->mw->rkey);
fence 435 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_re_fence_data *fence = &pd->fence;
fence 436 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_qplib_swqe *fence_wqe = &fence->bind_wqe;
fence 441 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe.bind.r_key = fence->bind_rkey;
fence 442 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->bind_rkey = ib_inc_rkey(fence->bind_rkey);
fence 459 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_re_fence_data *fence = &pd->fence;
fence 462 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_re_mr *mr = fence->mr;
fence 464 drivers/infiniband/hw/bnxt_re/ib_verbs.c if (fence->mw) {
fence 465 drivers/infiniband/hw/bnxt_re/ib_verbs.c bnxt_re_dealloc_mw(fence->mw);
fence 466 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->mw = NULL;
fence 475 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->mr = NULL;
fence 477 drivers/infiniband/hw/bnxt_re/ib_verbs.c if (fence->dma_addr) {
fence 478 drivers/infiniband/hw/bnxt_re/ib_verbs.c dma_unmap_single(dev, fence->dma_addr, BNXT_RE_FENCE_BYTES,
fence 480 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->dma_addr = 0;
fence 487 drivers/infiniband/hw/bnxt_re/ib_verbs.c struct bnxt_re_fence_data *fence = &pd->fence;
fence 496 drivers/infiniband/hw/bnxt_re/ib_verbs.c dma_addr = dma_map_single(dev, fence->va, BNXT_RE_FENCE_BYTES,
fence 502 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->dma_addr = 0;
fence 505 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->dma_addr = dma_addr;
fence 513 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->mr = mr;
fence 526 drivers/infiniband/hw/bnxt_re/ib_verbs.c mr->qplib_mr.va = (u64)(unsigned long)fence->va;
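The host1x cdma.c entries above implement a byte ring buffer in which "fence" is the last index safely written and each slot is 8 bytes wide. A standalone illustration (userspace C, not the driver) of the free-slot computation, including the wrap adjustment visible at lines 171-174:

#include <stdio.h>

struct push_buffer {
	unsigned int pos;	/* next write position, in bytes */
	unsigned int fence;	/* index we've written up to */
	unsigned int size;	/* ring size in bytes */
};

static unsigned int pushbuffer_space(const struct push_buffer *pb)
{
	unsigned int fence = pb->fence;

	if (pb->fence < pb->pos)	/* fence wrapped behind pos */
		fence += pb->size;

	return (fence - pb->pos) / 8;	/* 8 bytes per slot */
}

int main(void)
{
	struct push_buffer pb = { .pos = 16, .fence = 40, .size = 64 };

	printf("%u free slots\n", pushbuffer_space(&pb));	/* 3 */
	pb.fence = 8;	/* fence has wrapped around the ring */
	printf("%u free slots\n", pushbuffer_space(&pb));	/* 7 */
	return 0;
}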
fence 545 drivers/infiniband/hw/bnxt_re/ib_verbs.c fence->mw = mw;
fence 62 drivers/infiniband/hw/bnxt_re/ib_verbs.h struct bnxt_re_fence_data fence;
fence 4934 drivers/infiniband/hw/mlx5/qp.c unsigned int idx, u64 wr_id, int nreq, u8 fence,
fence 4942 drivers/infiniband/hw/mlx5/qp.c ctrl->fm_ce_se |= fence;
fence 4986 drivers/infiniband/hw/mlx5/qp.c u8 fence;
fence 5029 drivers/infiniband/hw/mlx5/qp.c fence = dev->umr_fence;
fence 5034 drivers/infiniband/hw/mlx5/qp.c fence = MLX5_FENCE_MODE_SMALL_AND_FENCE;
fence 5036 drivers/infiniband/hw/mlx5/qp.c fence = MLX5_FENCE_MODE_FENCE;
fence 5038 drivers/infiniband/hw/mlx5/qp.c fence = qp->next_fence;
fence 5111 drivers/infiniband/hw/mlx5/qp.c nreq, fence,
fence 5154 drivers/infiniband/hw/mlx5/qp.c wr->wr_id, nreq, fence,
fence 5310 drivers/infiniband/hw/mlx5/qp.c fence, mlx5_ib_opcode[wr->opcode]);
fence 19 drivers/misc/habanalabs/command_submission.c static const char *hl_fence_get_driver_name(struct dma_fence *fence)
fence 24 drivers/misc/habanalabs/command_submission.c static const char *hl_fence_get_timeline_name(struct dma_fence *fence)
fence 27 drivers/misc/habanalabs/command_submission.c container_of(fence, struct hl_dma_fence, base_fence);
fence 32 drivers/misc/habanalabs/command_submission.c static bool hl_fence_enable_signaling(struct dma_fence *fence)
fence 37 drivers/misc/habanalabs/command_submission.c static void hl_fence_release(struct dma_fence *fence)
fence 40 drivers/misc/habanalabs/command_submission.c container_of(fence, struct hl_dma_fence, base_fence);
fence 243 drivers/misc/habanalabs/command_submission.c dma_fence_set_error(cs->fence, -ETIMEDOUT);
fence 245 drivers/misc/habanalabs/command_submission.c dma_fence_set_error(cs->fence, -EIO);
fence 247 drivers/misc/habanalabs/command_submission.c dma_fence_signal(cs->fence);
fence 248 drivers/misc/habanalabs/command_submission.c dma_fence_put(cs->fence);
fence 287 drivers/misc/habanalabs/command_submission.c struct hl_dma_fence *fence;
fence 304 drivers/misc/habanalabs/command_submission.c fence = kmalloc(sizeof(*fence), GFP_ATOMIC);
fence 305 drivers/misc/habanalabs/command_submission.c if (!fence) {
fence 310 drivers/misc/habanalabs/command_submission.c fence->hdev = hdev;
fence 311 drivers/misc/habanalabs/command_submission.c spin_lock_init(&fence->lock);
fence 312 drivers/misc/habanalabs/command_submission.c cs->fence = &fence->base_fence;
fence 316 drivers/misc/habanalabs/command_submission.c fence->cs_seq = ctx->cs_sequence;
fence 317 drivers/misc/habanalabs/command_submission.c other = ctx->cs_pending[fence->cs_seq & (HL_MAX_PENDING_CS - 1)];
fence 326 drivers/misc/habanalabs/command_submission.c dma_fence_init(&fence->base_fence, &hl_fence_ops, &fence->lock,
fence 329 drivers/misc/habanalabs/command_submission.c cs->sequence = fence->cs_seq;
fence 331 drivers/misc/habanalabs/command_submission.c ctx->cs_pending[fence->cs_seq & (HL_MAX_PENDING_CS - 1)] =
fence 332 drivers/misc/habanalabs/command_submission.c &fence->base_fence;
fence 335 drivers/misc/habanalabs/command_submission.c dma_fence_get(&fence->base_fence);
fence 346 drivers/misc/habanalabs/command_submission.c kfree(fence);
fence 739 drivers/misc/habanalabs/command_submission.c struct dma_fence *fence;
fence 750 drivers/misc/habanalabs/command_submission.c fence = hl_ctx_get_fence(ctx, seq);
fence 751 drivers/misc/habanalabs/command_submission.c if (IS_ERR(fence)) {
fence 752 drivers/misc/habanalabs/command_submission.c rc = PTR_ERR(fence);
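The habanalabs entries above show the canonical way to create a software-only dma_fence: embed the fence next to its spinlock, supply a dma_fence_ops with the two mandatory name callbacks, and initialize it with a context and seqno. A minimal sketch with invented names (only the dma-fence calls are real API; the context would normally come from dma_fence_context_alloc()):

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct sw_job_fence {
	struct dma_fence base;
	spinlock_t lock;	/* protects base's flags/callback list */
};

static const char *job_driver_name(struct dma_fence *f) { return "sw-job"; }
static const char *job_timeline_name(struct dma_fence *f) { return "jobs"; }

static const struct dma_fence_ops sw_job_fence_ops = {
	.get_driver_name = job_driver_name,
	.get_timeline_name = job_timeline_name,
};

static struct dma_fence *sw_job_fence_create(u64 context, u64 seqno)
{
	struct sw_job_fence *jf = kzalloc(sizeof(*jf), GFP_KERNEL);

	if (!jf)
		return NULL;
	spin_lock_init(&jf->lock);
	dma_fence_init(&jf->base, &sw_job_fence_ops, &jf->lock,
		       context, seqno);
	return &jf->base;	/* holds one reference; dma_fence_put() it */
}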
fence 753 drivers/misc/habanalabs/command_submission.c } else if (fence) {
fence 754 drivers/misc/habanalabs/command_submission.c rc = dma_fence_wait_timeout(fence, true, timeout);
fence 755 drivers/misc/habanalabs/command_submission.c if (fence->error == -ETIMEDOUT)
fence 757 drivers/misc/habanalabs/command_submission.c else if (fence->error == -EIO)
fence 759 drivers/misc/habanalabs/command_submission.c dma_fence_put(fence);
fence 174 drivers/misc/habanalabs/context.c struct dma_fence *fence;
fence 195 drivers/misc/habanalabs/context.c fence = dma_fence_get(
fence 199 drivers/misc/habanalabs/context.c return fence;
fence 99 drivers/misc/habanalabs/firmware_if.c rc = hl_poll_timeout_memory(hdev, &pkt->fence, tmp,
fence 732 drivers/misc/habanalabs/habanalabs.h struct dma_fence *fence;
fence 236 drivers/misc/habanalabs/include/armcp_if.h __le32 fence; /* Signal to host that message is completed */
fence 66 drivers/misc/mic/scif/scif_fence.c struct scif_remote_fence_info *fence;
fence 73 drivers/misc/mic/scif/scif_fence.c fence = kmalloc(sizeof(*fence), GFP_KERNEL);
fence 74 drivers/misc/mic/scif/scif_fence.c if (!fence) {
fence 82 drivers/misc/mic/scif/scif_fence.c memcpy(&fence->msg, msg, sizeof(struct scifmsg));
fence 83 drivers/misc/mic/scif/scif_fence.c INIT_LIST_HEAD(&fence->list);
fence 88 drivers/misc/mic/scif/scif_fence.c list_add_tail(&fence->list, &scif_info.fence);
fence 367 drivers/misc/mic/scif/scif_fence.c struct scif_remote_fence_info *fence;
fence 373 drivers/misc/mic/scif/scif_fence.c list_for_each_safe(item, tmp, &scif_info.fence) {
fence 374 drivers/misc/mic/scif/scif_fence.c fence = list_entry(item, struct scif_remote_fence_info,
fence 377 drivers/misc/mic/scif/scif_fence.c list_del(&fence->list);
fence 380 drivers/misc/mic/scif/scif_fence.c ep = (struct scif_endpt *)fence->msg.payload[0];
fence 381 drivers/misc/mic/scif/scif_fence.c mark = fence->msg.payload[2];
fence 384 drivers/misc/mic/scif/scif_fence.c fence->msg.uop = SCIF_WAIT_NACK;
fence 386 drivers/misc/mic/scif/scif_fence.c fence->msg.uop = SCIF_WAIT_ACK;
fence 387 drivers/misc/mic/scif/scif_fence.c fence->msg.payload[0] = ep->remote_ep;
fence 388 drivers/misc/mic/scif/scif_fence.c scif_nodeqp_send(ep->remote_dev, &fence->msg);
fence 389 drivers/misc/mic/scif/scif_fence.c kfree(fence);
fence 274 drivers/misc/mic/scif/scif_main.c INIT_LIST_HEAD(&scif_info.fence);
fence 111 drivers/misc/mic/scif/scif_main.h struct list_head fence;
fence 54 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c bool fence)
fence 66 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c cseg->fm_ce_se = fence ? MLX5_FENCE_MODE_INITIATOR_SMALL : 0;
fence 88 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c bool fence)
fence 100 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c cseg->fm_ce_se = fence ? MLX5_FENCE_MODE_INITIATOR_SMALL : 0;
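The wait path at the top of this block (command_submission.c lines 750-759) combines a timed wait with an error check on the fence itself, since a fence can signal while carrying an error set via dma_fence_set_error(). A sketch of that pattern; the surrounding function is illustrative:

#include <linux/dma-fence.h>
#include <linux/errno.h>

static int wait_for_submission(struct dma_fence *fence, long timeout)
{
	long rc = dma_fence_wait_timeout(fence, true, timeout);

	if (rc < 0)
		return rc;		/* e.g. -ERESTARTSYS */
	if (rc == 0)
		return -ETIMEDOUT;	/* the wait itself timed out */
	if (fence->error)		/* set via dma_fence_set_error() */
		return fence->error;	/* e.g. -ETIMEDOUT or -EIO */
	return 0;			/* signaled successfully */
}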
fence 135 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c bool fence)
fence 141 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c build_static_params(umr_wqe, sq->pc, sq->sqn, priv_tx, fence);
fence 149 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c bool fence)
fence 155 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c build_progress_params(wqe, sq->pc, sq->sqn, priv_tx, fence);
fence 393 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c bool fence = !(i || frag_offset);
fence 399 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c if (tx_post_resync_dump(sq, f, priv_tx->tisn, fence)) {
fence 270 drivers/scsi/smartpqi/smartpqi.h u8 fence : 1;
fence 293 drivers/scsi/smartpqi/smartpqi.h u8 fence : 1;
fence 395 drivers/scsi/smartpqi/smartpqi.h u8 fence : 1;
fence 229 drivers/video/fbdev/intelfb/intelfb.h u32 fence[8];
fence 643 drivers/video/fbdev/intelfb/intelfbhw.c hw->fence[i] = INREG(FENCE + (i << 2));
fence 862 drivers/video/fbdev/intelfb/intelfbhw.c hw->fence[i]);
fence 95 fs/ocfs2/cluster/quorum.c int lowest_hb, lowest_reachable = 0, fence = 0;
fence 122 fs/ocfs2/cluster/quorum.c fence = 1;
fence 136 fs/ocfs2/cluster/quorum.c fence = 1;
fence 145 fs/ocfs2/cluster/quorum.c fence = 1;
fence 150 fs/ocfs2/cluster/quorum.c if (fence) {
fence 53 include/drm/drm_atomic_uapi.h struct dma_fence *fence);
fence 124 include/drm/drm_file.h struct dma_fence *fence;
fence 400 include/drm/drm_gem.h struct dma_fence *fence);
fence 80 include/drm/drm_plane.h struct dma_fence *fence;
fence 51 include/drm/drm_syncobj.h struct dma_fence __rcu *fence;
fence 105 include/drm/drm_syncobj.h struct dma_fence *fence;
fence 108 include/drm/drm_syncobj.h fence = dma_fence_get_rcu_safe(&syncobj->fence);
fence 111 include/drm/drm_syncobj.h return fence;
fence 118 include/drm/drm_syncobj.h struct dma_fence *fence,
fence 121 include/drm/drm_syncobj.h struct dma_fence *fence);
fence 124 include/drm/drm_syncobj.h struct dma_fence **fence);
fence 127 include/drm/drm_syncobj.h struct dma_fence *fence);
fence 301 include/drm/gpu_scheduler.h bool drm_sched_dependency_optimized(struct dma_fence* fence,
fence 328 include/drm/gpu_scheduler.h void drm_sched_fence_scheduled(struct drm_sched_fence *fence);
fence 329 include/drm/gpu_scheduler.h void drm_sched_fence_finished(struct drm_sched_fence *fence);
fence 866 include/drm/ttm/ttm_bo_driver.h struct dma_fence *fence, bool evict,
fence 881 include/drm/ttm/ttm_bo_driver.h struct dma_fence *fence, bool evict,
fence 119 include/drm/ttm/ttm_execbuf_util.h struct dma_fence *fence);
fence 56 include/linux/dma-fence-array.h static inline bool dma_fence_is_array(struct dma_fence *fence)
fence 58 include/linux/dma-fence-array.h return fence->ops == &dma_fence_array_ops;
fence 69 include/linux/dma-fence-array.h to_dma_fence_array(struct dma_fence *fence)
fence 71 include/linux/dma-fence-array.h if (fence->ops != &dma_fence_array_ops)
fence 74 include/linux/dma-fence-array.h return container_of(fence, struct dma_fence_array, base);
fence 82 include/linux/dma-fence-array.h bool dma_fence_match_context(struct dma_fence *fence, u64 context);
fence 31 include/linux/dma-fence-chain.h struct dma_fence *fence;
fence 46 include/linux/dma-fence-chain.h to_dma_fence_chain(struct dma_fence *fence)
fence 48 include/linux/dma-fence-chain.h if (!fence || fence->ops != &dma_fence_chain_ops)
fence 51 include/linux/dma-fence-chain.h return container_of(fence, struct dma_fence_chain, base);
fence 66 include/linux/dma-fence-chain.h struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence);
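The drm_syncobj entries above are the standard consumer of dma_fence_get_rcu_safe(): under the RCU read lock it loops until it holds a reference on a fence pointer that was not replaced underneath it. A sketch against a generic __rcu fence pointer (struct obj_with_fence is invented):

#include <linux/dma-fence.h>
#include <linux/rcupdate.h>

struct obj_with_fence {
	struct dma_fence __rcu *fence;	/* replaced by writers at any time */
};

static struct dma_fence *obj_fence_get(struct obj_with_fence *obj)
{
	struct dma_fence *fence;

	rcu_read_lock();
	fence = dma_fence_get_rcu_safe(&obj->fence);	/* may return NULL */
	rcu_read_unlock();

	return fence;	/* caller must dma_fence_put() when done */
}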
fence 70 include/linux/dma-fence-chain.h struct dma_fence *fence,
fence 105 include/linux/dma-fence.h typedef void (*dma_fence_func_t)(struct dma_fence *fence,
fence 143 include/linux/dma-fence.h const char * (*get_driver_name)(struct dma_fence *fence);
fence 155 include/linux/dma-fence.h const char * (*get_timeline_name)(struct dma_fence *fence);
fence 192 include/linux/dma-fence.h bool (*enable_signaling)(struct dma_fence *fence);
fence 209 include/linux/dma-fence.h bool (*signaled)(struct dma_fence *fence);
fence 231 include/linux/dma-fence.h signed long (*wait)(struct dma_fence *fence,
fence 242 include/linux/dma-fence.h void (*release)(struct dma_fence *fence);
fence 252 include/linux/dma-fence.h void (*fence_value_str)(struct dma_fence *fence, char *str, int size);
fence 262 include/linux/dma-fence.h void (*timeline_value_str)(struct dma_fence *fence,
fence 266 include/linux/dma-fence.h void dma_fence_init(struct dma_fence *fence, const struct dma_fence_ops *ops,
fence 270 include/linux/dma-fence.h void dma_fence_free(struct dma_fence *fence);
fence 276 include/linux/dma-fence.h static inline void dma_fence_put(struct dma_fence *fence)
fence 278 include/linux/dma-fence.h if (fence)
fence 279 include/linux/dma-fence.h kref_put(&fence->refcount, dma_fence_release);
fence 288 include/linux/dma-fence.h static inline struct dma_fence *dma_fence_get(struct dma_fence *fence)
fence 290 include/linux/dma-fence.h if (fence)
fence 291 include/linux/dma-fence.h kref_get(&fence->refcount);
fence 292 include/linux/dma-fence.h return fence;
fence 302 include/linux/dma-fence.h static inline struct dma_fence *dma_fence_get_rcu(struct dma_fence *fence)
fence 304 include/linux/dma-fence.h if (kref_get_unless_zero(&fence->refcount))
fence 305 include/linux/dma-fence.h return fence;
fence 330 include/linux/dma-fence.h struct dma_fence *fence;
fence 332 include/linux/dma-fence.h fence = rcu_dereference(*fencep);
fence 333 include/linux/dma-fence.h if (!fence)
fence 336 include/linux/dma-fence.h if (!dma_fence_get_rcu(fence))
fence 353 include/linux/dma-fence.h if (fence == rcu_access_pointer(*fencep))
fence 354 include/linux/dma-fence.h return rcu_pointer_handoff(fence);
fence 356 include/linux/dma-fence.h dma_fence_put(fence);
fence 360 include/linux/dma-fence.h int dma_fence_signal(struct dma_fence *fence);
fence 361 include/linux/dma-fence.h int dma_fence_signal_locked(struct dma_fence *fence);
fence 362 include/linux/dma-fence.h signed long dma_fence_default_wait(struct dma_fence *fence,
fence 364 include/linux/dma-fence.h int dma_fence_add_callback(struct dma_fence *fence,
fence 367 include/linux/dma-fence.h bool dma_fence_remove_callback(struct dma_fence *fence,
fence 369 include/linux/dma-fence.h void dma_fence_enable_sw_signaling(struct dma_fence *fence);
fence 386 include/linux/dma-fence.h dma_fence_is_signaled_locked(struct dma_fence *fence)
fence 388 include/linux/dma-fence.h if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
fence 391 include/linux/dma-fence.h if (fence->ops->signaled && fence->ops->signaled(fence)) {
fence 392 include/linux/dma-fence.h dma_fence_signal_locked(fence);
fence 416 include/linux/dma-fence.h dma_fence_is_signaled(struct dma_fence *fence)
fence 418 include/linux/dma-fence.h if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags))
fence 421 include/linux/dma-fence.h if (fence->ops->signaled && fence->ops->signaled(fence)) {
fence 422 include/linux/dma-fence.h dma_fence_signal(fence);
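dma_fence_add_callback(), declared above, returns -ENOENT when the fence is already signaled, in which case the callback will never be invoked and the caller must complete inline. A sketch of the usual arming pattern; everything except the dma-fence calls is illustrative:

#include <linux/dma-fence.h>
#include <linux/errno.h>

struct waiter {
	struct dma_fence_cb cb;
	/* ... driver state ... */
};

static void waiter_wake(struct dma_fence *fence, struct dma_fence_cb *cb)
{
	struct waiter *w = container_of(cb, struct waiter, cb);

	/* invoked once from the signaling path; keep it short */
	(void)w;
}

static int waiter_arm(struct waiter *w, struct dma_fence *fence)
{
	int ret = dma_fence_add_callback(fence, &w->cb, waiter_wake);

	if (ret == -ENOENT) {	/* already signaled: complete inline */
		waiter_wake(fence, &w->cb);
		return 0;
	}
	return ret;
}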
fence 508 include/linux/dma-fence.h static inline int dma_fence_get_status_locked(struct dma_fence *fence)
fence 510 include/linux/dma-fence.h if (dma_fence_is_signaled_locked(fence))
fence 511 include/linux/dma-fence.h return fence->error ?: 1;
fence 516 include/linux/dma-fence.h int dma_fence_get_status(struct dma_fence *fence);
fence 529 include/linux/dma-fence.h static inline void dma_fence_set_error(struct dma_fence *fence,
fence 532 include/linux/dma-fence.h WARN_ON(test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags));
fence 535 include/linux/dma-fence.h fence->error = error;
fence 560 include/linux/dma-fence.h static inline signed long dma_fence_wait(struct dma_fence *fence, bool intr)
fence 568 include/linux/dma-fence.h ret = dma_fence_wait_timeout(fence, intr, MAX_SCHEDULE_TIMEOUT);
fence 77 include/linux/dma-resv.h struct dma_resv_list __rcu *fence;
fence 93 include/linux/dma-resv.h return rcu_dereference_protected(obj->fence,
fence 222 include/linux/dma-resv.h if (rcu_access_pointer(obj->fence)) {
fence 223 include/linux/dma-resv.h struct dma_resv_list *fence = dma_resv_get_list(obj);
fence 225 include/linux/dma-resv.h fence->shared_max = fence->shared_count;
fence 264 include/linux/dma-resv.h struct dma_fence *fence;
fence 270 include/linux/dma-resv.h fence = dma_fence_get_rcu_safe(&obj->fence_excl);
fence 273 include/linux/dma-resv.h return fence;
fence 279 include/linux/dma-resv.h void dma_resv_add_shared_fence(struct dma_resv *obj, struct dma_fence *fence);
fence 281 include/linux/dma-resv.h void dma_resv_add_excl_fence(struct dma_resv *obj, struct dma_fence *fence);
fence 42 include/linux/seqno-fence.h to_seqno_fence(struct dma_fence *fence)
fence 44 include/linux/seqno-fence.h if (fence->ops != &seqno_fence_ops)
fence 46 include/linux/seqno-fence.h return container_of(fence, struct seqno_fence, base);
fence 87 include/linux/seqno-fence.h seqno_fence_init(struct seqno_fence *fence, spinlock_t *lock,
fence 93 include/linux/seqno-fence.h BUG_ON(!fence || !sync_buf || !ops);
fence 101 include/linux/seqno-fence.h fence->ops = ops;
fence 102 include/linux/seqno-fence.h dma_fence_init(&fence->base, &seqno_fence_ops, lock, context, seqno);
fence 104 include/linux/seqno-fence.h fence->sync_buf = sync_buf;
fence 105 include/linux/seqno-fence.h fence->seqno_ofs = seqno_ofs;
fence 106 include/linux/seqno-fence.h fence->condition = cond;
fence 52 include/linux/sync_file.h struct dma_fence *fence;
fence 58 include/linux/sync_file.h struct sync_file *sync_file_create(struct dma_fence *fence);
fence 14 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 16 include/trace/events/dma_fence.h TP_ARGS(fence),
fence 19 include/trace/events/dma_fence.h __string(driver, fence->ops->get_driver_name(fence))
fence 20 include/trace/events/dma_fence.h __string(timeline, fence->ops->get_timeline_name(fence))
fence 26 include/trace/events/dma_fence.h __assign_str(driver, fence->ops->get_driver_name(fence))
fence 27 include/trace/events/dma_fence.h __assign_str(timeline, fence->ops->get_timeline_name(fence))
fence 28 include/trace/events/dma_fence.h __entry->context = fence->context;
fence 29 include/trace/events/dma_fence.h __entry->seqno = fence->seqno;
fence 39 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 41 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 46 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 48 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 53 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
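The dma-resv declarations above pair with a locked update protocol: take the reservation lock, reserve a shared slot, then publish the fences. A sketch assuming the 5.4-era interface (dma_resv_lock()/dma_resv_reserve_shared()); error handling is trimmed:

#include <linux/dma-resv.h>

static int attach_fences(struct dma_resv *resv,
			 struct dma_fence *excl, struct dma_fence *shared)
{
	int ret;

	ret = dma_resv_lock(resv, NULL);	/* no ww acquire context */
	if (ret)
		return ret;

	ret = dma_resv_reserve_shared(resv, 1);	/* grow the shared list */
	if (!ret) {
		dma_resv_add_shared_fence(resv, shared);
		dma_resv_add_excl_fence(resv, excl);	/* replaces exclusive */
	}

	dma_resv_unlock(resv);
	return ret;
}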
fence 55 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 60 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 62 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 67 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 69 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 74 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 76 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 81 include/trace/events/dma_fence.h TP_PROTO(struct dma_fence *fence),
fence 83 include/trace/events/dma_fence.h TP_ARGS(fence)
fence 634 include/uapi/drm/amdgpu_drm.h struct drm_amdgpu_fence fence;
fence 195 include/uapi/drm/etnaviv_drm.h __u32 fence; /* out */
fence 221 include/uapi/drm/etnaviv_drm.h __u32 fence; /* in */
fence 233 include/uapi/drm/msm_drm.h __u32 fence; /* out */
fence 250 include/uapi/drm/msm_drm.h __u32 fence; /* in */
fence 514 include/uapi/drm/tegra_drm.h __u32 fence;
fence 54 include/uapi/drm/vgem_drm.h __u32 fence;
fence 29 include/uapi/linux/sync_file.h __s32 fence;
fence 49 tools/testing/selftests/sync/sync.c __s32 fence;
fence 81 tools/testing/selftests/sync/sync.c return data.fence;
fence 208 tools/testing/selftests/sync/sync.c return data.fence;
fence 46 tools/testing/selftests/sync/sync_alloc.c int timeline, fence, valid;
fence 52 tools/testing/selftests/sync/sync_alloc.c fence = sw_sync_fence_create(timeline, "allocFence", 1);
fence 53 tools/testing/selftests/sync/sync_alloc.c valid = sw_sync_fence_is_valid(fence);
fence 56 tools/testing/selftests/sync/sync_alloc.c sw_sync_fence_destroy(fence);
fence 63 tools/testing/selftests/sync/sync_alloc.c int fence, timeline;
fence 68 tools/testing/selftests/sync/sync_alloc.c fence = sw_sync_fence_create(-1, "fence", 1);
fence 69 tools/testing/selftests/sync/sync_alloc.c ASSERT(fence < 0, "Success allocating negative fence\n");
fence 71 tools/testing/selftests/sync/sync_alloc.c sw_sync_fence_destroy(fence);
fence 34 tools/testing/selftests/sync/sync_fence.c int fence, valid, ret;
fence 40 tools/testing/selftests/sync/sync_fence.c fence = sw_sync_fence_create(timeline, "allocFence", 5);
fence 41 tools/testing/selftests/sync/sync_fence.c valid = sw_sync_fence_is_valid(fence);
fence 45 tools/testing/selftests/sync/sync_fence.c ret = sync_wait(fence, 0);
fence 53 tools/testing/selftests/sync/sync_fence.c ret = sync_wait(fence, 0);
fence 61 tools/testing/selftests/sync/sync_fence.c ret = sync_wait(fence, 0);
fence 67 tools/testing/selftests/sync/sync_fence.c ret = sync_wait(fence, 0);
fence 70 tools/testing/selftests/sync/sync_fence.c sw_sync_fence_destroy(fence);
fence 34 tools/testing/selftests/sync/sync_merge.c int fence, valid, merged;
fence 40 tools/testing/selftests/sync/sync_merge.c fence = sw_sync_fence_create(timeline, "allocFence", 5);
fence 41 tools/testing/selftests/sync/sync_merge.c valid = sw_sync_fence_is_valid(fence);
fence 44 tools/testing/selftests/sync/sync_merge.c merged = sync_merge("mergeFence", fence, fence);
fence 45 tools/testing/selftests/sync/sync_merge.c valid = sw_sync_fence_is_valid(fence);
fence 56 tools/testing/selftests/sync/sync_merge.c sw_sync_fence_destroy(fence);
fence 40 tools/testing/selftests/sync/sync_stress_consumer.c static int busy_wait_on_fence(int fence)
fence 45 tools/testing/selftests/sync/sync_stress_consumer.c error = sync_fence_count_with_status(fence, FENCE_STATUS_ERROR);
fence 47 tools/testing/selftests/sync/sync_stress_consumer.c active = sync_fence_count_with_status(fence,
fence 66 tools/testing/selftests/sync/sync_stress_consumer.c int fence, valid, i;
fence 72 tools/testing/selftests/sync/sync_stress_consumer.c fence = sw_sync_fence_create(consumer_timeline, "fence", i);
fence 73 tools/testing/selftests/sync/sync_stress_consumer.c valid = sw_sync_fence_is_valid(fence);
fence 82 tools/testing/selftests/sync/sync_stress_consumer.c ASSERT(sync_wait(fence, -1) > 0,
fence 85 tools/testing/selftests/sync/sync_stress_consumer.c ASSERT(busy_wait_on_fence(fence) == 0,
fence 100 tools/testing/selftests/sync/sync_stress_consumer.c sw_sync_fence_destroy(fence);
fence 108 tools/testing/selftests/sync/sync_stress_consumer.c int fence, merged, tmp, valid, it, i;
fence 115 tools/testing/selftests/sync/sync_stress_consumer.c fence = sw_sync_fence_create(producer_timelines[0], "name", it);
fence 119 tools/testing/selftests/sync/sync_stress_consumer.c merged = sync_merge("name", tmp, fence);
fence 121 tools/testing/selftests/sync/sync_stress_consumer.c sw_sync_fence_destroy(fence);
fence 122 tools/testing/selftests/sync/sync_stress_consumer.c fence = merged;
fence 125 tools/testing/selftests/sync/sync_stress_consumer.c valid = sw_sync_fence_is_valid(fence);
fence 133 tools/testing/selftests/sync/sync_stress_consumer.c ASSERT(sync_wait(fence, -1) > 0,
fence 136 tools/testing/selftests/sync/sync_stress_consumer.c ASSERT(busy_wait_on_fence(fence) == 0,
fence 147 tools/testing/selftests/sync/sync_stress_consumer.c sw_sync_fence_destroy(fence);
fence 43 tools/testing/selftests/sync/sync_stress_merge.c int fence, tmpfence, merged, valid;
fence 51 tools/testing/selftests/sync/sync_stress_merge.c fence = sw_sync_fence_create(timelines[0], "fence", 0);
fence 52 tools/testing/selftests/sync/sync_stress_merge.c valid = sw_sync_fence_is_valid(fence);
fence 76 tools/testing/selftests/sync/sync_stress_merge.c merged = sync_merge("merge", tmpfence, fence);
fence 78 tools/testing/selftests/sync/sync_stress_merge.c sw_sync_fence_destroy(fence);
fence 79 tools/testing/selftests/sync/sync_stress_merge.c fence = merged;
fence 91 tools/testing/selftests/sync/sync_stress_merge.c ASSERT(sync_fence_size(fence) == size,
fence 97 tools/testing/selftests/sync/sync_stress_merge.c ret = sync_wait(fence, 0);
fence 106 tools/testing/selftests/sync/sync_stress_merge.c ret = sync_wait(fence, 0);
fence 109 tools/testing/selftests/sync/sync_stress_merge.c sw_sync_fence_destroy(fence);
fence 45 tools/testing/selftests/sync/sync_stress_parallelism.c int fence, valid, ret, i;
fence 48 tools/testing/selftests/sync/sync_stress_parallelism.c fence = sw_sync_fence_create(timeline, "fence",
fence 50 tools/testing/selftests/sync/sync_stress_parallelism.c valid = sw_sync_fence_is_valid(fence);
fence 54 tools/testing/selftests/sync/sync_stress_parallelism.c ret = sync_wait(fence, -1);
fence 69 tools/testing/selftests/sync/sync_stress_parallelism.c sw_sync_fence_destroy(fence);
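The selftests above all follow the same sw_sync pattern: create a timeline, create a fence at a future point on it, observe that sync_wait() times out, then advance the timeline past the point and observe the wait succeed. A condensed sketch using the helpers from tools/testing/selftests/sync (sw_sync.h / sync.h); sw_sync_timeline_create(), sw_sync_timeline_inc() and sw_sync_timeline_destroy() are assumed from that helper set:

#include "sync.h"
#include "sw_sync.h"

int demo(void)
{
	int timeline, fence, ret;

	timeline = sw_sync_timeline_create();
	fence = sw_sync_fence_create(timeline, "demo", 5);

	ret = sync_wait(fence, 0);		/* 0 ms timeout: expect 0 */
	sw_sync_timeline_inc(timeline, 5);	/* advance past point 5 */
	ret = sync_wait(fence, -1);		/* now returns > 0 */

	sw_sync_fence_destroy(fence);
	sw_sync_timeline_destroy(timeline);
	return ret > 0 ? 0 : 1;
}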