Searched refs:ticket (Results 1 – 43 of 43) sorted by relevance

/linux-4.4.14/drivers/gpu/drm/ttm/
ttm_execbuf_util.c  57 void ttm_eu_backoff_reservation(struct ww_acquire_ctx *ticket, in ttm_eu_backoff_reservation() argument
78 if (ticket) in ttm_eu_backoff_reservation()
79 ww_acquire_fini(ticket); in ttm_eu_backoff_reservation()
95 int ttm_eu_reserve_buffers(struct ww_acquire_ctx *ticket, in ttm_eu_reserve_buffers() argument
109 if (ticket) in ttm_eu_reserve_buffers()
110 ww_acquire_init(ticket, &reservation_ww_class); in ttm_eu_reserve_buffers()
115 ret = __ttm_bo_reserve(bo, intr, (ticket == NULL), true, in ttm_eu_reserve_buffers()
116 ticket); in ttm_eu_reserve_buffers()
147 ticket); in ttm_eu_reserve_buffers()
149 ww_mutex_lock_slow(&bo->resv->lock, ticket); in ttm_eu_reserve_buffers()
[all …]
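The ttm_execbuf_util.c matches above, and the driver entries further down this list (virtio, msm, amdgpu, qxl, nouveau, radeon, vmwgfx), all follow the same execbuf reservation pattern: a ww_acquire_ctx "ticket" is initialised, every buffer object on a validation list is reserved through it, and the caller either fences the objects on success or backs the reservation off on failure. A minimal sketch of that flow against the 4.4 ttm_eu_* API follows; example_submit() and example_validate_and_emit() are hypothetical caller names, not functions from the tree.

	#include <drm/ttm/ttm_execbuf_util.h>

	/* Hypothetical: validate the reserved BOs and build the command stream. */
	static int example_validate_and_emit(struct list_head *entries);

	/*
	 * "entries" is assumed to be a pre-populated list of struct
	 * ttm_validate_buffer; "fence" is whatever fence the submission produced.
	 */
	static int example_submit(struct list_head *entries, struct fence *fence)
	{
		struct ww_acquire_ctx ticket;
		int ret;

		/* Reserve every BO on the list; the ticket orders this task
		 * against others so the wound/wait logic can resolve contention. */
		ret = ttm_eu_reserve_buffers(&ticket, entries, true, NULL);
		if (ret)
			return ret;

		ret = example_validate_and_emit(entries);
		if (ret) {
			/* Drop all reservations and release the ticket. */
			ttm_eu_backoff_reservation(&ticket, entries);
			return ret;
		}

		/* Attach the fence to every reserved BO and unreserve them. */
		ttm_eu_fence_buffer_objects(&ticket, entries, fence);
		return 0;
	}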
/linux-4.4.14/fs/xfs/
xfs_log.c  44 struct xlog_ticket *ticket,
78 struct xlog_ticket *ticket,
102 struct xlog_ticket *ticket);
106 struct xlog_ticket *ticket);
514 struct xlog_ticket *ticket, in xfs_log_done() argument
526 (((ticket->t_flags & XLOG_TIC_INITED) == 0) && in xfs_log_done()
527 (xlog_commit_record(log, ticket, iclog, &lsn)))) { in xfs_log_done()
534 trace_xfs_log_done_nonperm(log, ticket); in xfs_log_done()
540 xlog_ungrant_log_space(log, ticket); in xfs_log_done()
542 trace_xfs_log_done_perm(log, ticket); in xfs_log_done()
[all …]
xfs_log_cil.c  78 log->l_cilp->xc_ctx->ticket = xlog_cil_ticket_alloc(log); in xlog_cil_init_post_recovery()
334 if (ctx->ticket->t_curr_res == 0) { in xlog_cil_insert_items()
335 ctx->ticket->t_curr_res = ctx->ticket->t_unit_res; in xlog_cil_insert_items()
336 tp->t_ticket->t_curr_res -= ctx->ticket->t_unit_res; in xlog_cil_insert_items()
348 ctx->ticket->t_unit_res += hdrs; in xlog_cil_insert_items()
349 ctx->ticket->t_curr_res += hdrs; in xlog_cil_insert_items()
452 new_ctx->ticket = xlog_cil_ticket_alloc(log); in xlog_cil_push()
580 tic = ctx->ticket; in xlog_cil_push()
659 xfs_log_ticket_put(new_ctx->ticket); in xlog_cil_push()
992 if (log->l_cilp->xc_ctx->ticket) in xlog_cil_destroy()
[all …]
xfs_log.h  130 struct xlog_ticket *ticket,
162 struct xlog_ticket **ticket,
174 struct xlog_ticket *xfs_log_ticket_get(struct xlog_ticket *ticket);
175 void xfs_log_ticket_put(struct xlog_ticket *ticket);
xfs_log_priv.h  253 struct xlog_ticket *ticket; /* chkpt ticket */ member
454 void xlog_print_tic_res(struct xfs_mount *mp, struct xlog_ticket *ticket);
/linux-4.4.14/arch/ia64/include/asm/
spinlock.h  42 int *p = (int *)&lock->lock, ticket, serve; in __ticket_spin_lock() local
44 ticket = ia64_fetchadd(1, p, acq); in __ticket_spin_lock()
46 if (!(((ticket >> TICKET_SHIFT) ^ ticket) & TICKET_MASK)) in __ticket_spin_lock()
54 if (!(((serve >> TICKET_SHIFT) ^ ticket) & TICKET_MASK)) in __ticket_spin_lock()
79 int *p = (int *)&lock->lock, ticket; in __ticket_spin_unlock_wait() local
84 asm volatile ("ld4.c.nc %0=[%1]" : "=r"(ticket) : "r"(p) : "memory"); in __ticket_spin_unlock_wait()
85 if (!(((ticket >> TICKET_SHIFT) ^ ticket) & TICKET_MASK)) in __ticket_spin_unlock_wait()
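The ia64 routines above, like the MIPS, tile and x86 entries later in this list, implement ticket spin locks: a taker atomically fetch-and-increments a "next ticket" counter and then spins until the "now serving" counter reaches the value it drew. A user-space sketch of the same idea with C11 atomics (illustrative only, not taken from any of the arch code):

	#include <stdatomic.h>

	struct ticket_lock {
		atomic_uint next;	/* next ticket to hand out */
		atomic_uint serving;	/* ticket currently being served */
	};

	static void ticket_lock(struct ticket_lock *l)
	{
		/* Draw a ticket: atomic fetch-and-add, as ia64_fetchadd() does above. */
		unsigned int t = atomic_fetch_add_explicit(&l->next, 1,
							   memory_order_relaxed);

		/* Spin until our number comes up. */
		while (atomic_load_explicit(&l->serving, memory_order_acquire) != t)
			;
	}

	static void ticket_unlock(struct ticket_lock *l)
	{
		/* Hand the lock to the next waiter in FIFO order. */
		atomic_fetch_add_explicit(&l->serving, 1, memory_order_release);
	}

The FIFO ordering is the point of the scheme: unlike a plain test-and-set lock, waiters are served in the order they drew tickets, so no CPU can be starved.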
/linux-4.4.14/include/drm/ttm/
ttm_execbuf_util.h  61 extern void ttm_eu_backoff_reservation(struct ww_acquire_ctx *ticket,
98 extern int ttm_eu_reserve_buffers(struct ww_acquire_ctx *ticket,
115 extern void ttm_eu_fence_buffer_objects(struct ww_acquire_ctx *ticket,
ttm_bo_driver.h  781 struct ww_acquire_ctx *ticket) in __ttm_bo_reserve() argument
787 if (WARN_ON(ticket)) in __ttm_bo_reserve()
795 ret = ww_mutex_lock_interruptible(&bo->resv->lock, ticket); in __ttm_bo_reserve()
797 ret = ww_mutex_lock(&bo->resv->lock, ticket); in __ttm_bo_reserve()
851 struct ww_acquire_ctx *ticket) in ttm_bo_reserve() argument
857 ret = __ttm_bo_reserve(bo, interruptible, no_wait, use_ticket, ticket); in ttm_bo_reserve()
876 struct ww_acquire_ctx *ticket) in ttm_bo_reserve_slowpath() argument
884 ticket); in ttm_bo_reserve_slowpath()
886 ww_mutex_lock_slow(&bo->resv->lock, ticket); in ttm_bo_reserve_slowpath()
/linux-4.4.14/arch/mips/include/asm/
spinlock.h  47 return lock.h.serving_now == lock.h.ticket; in arch_spin_value_unlocked()
99 [ticket] "=&r" (tmp), in arch_spin_lock()
132 [ticket] "=&r" (tmp), in arch_spin_lock()
173 [ticket] "=&r" (tmp), in arch_spin_trylock()
197 [ticket] "=&r" (tmp), in arch_spin_trylock()
spinlock_types.h  20 u16 ticket; member
24 u16 ticket;
/linux-4.4.14/net/rxrpc/
ar-key.c  131 memcpy(&token->kad->ticket, &xdr[8], tktlen); in rxrpc_preparse_xdr_rxkad()
145 token->kad->ticket[0], token->kad->ticket[1], in rxrpc_preparse_xdr_rxkad()
146 token->kad->ticket[2], token->kad->ticket[3], in rxrpc_preparse_xdr_rxkad()
147 token->kad->ticket[4], token->kad->ticket[5], in rxrpc_preparse_xdr_rxkad()
148 token->kad->ticket[6], token->kad->ticket[7]); in rxrpc_preparse_xdr_rxkad()
205 kfree(rxk5->ticket); in rxrpc_rxk5_free()
499 ret = rxrpc_krb5_decode_ticket(&rxk5->ticket, &rxk5->ticket_len, in rxrpc_preparse_xdr_rxk5()
739 v1->ticket[0], v1->ticket[1], in rxrpc_preparse()
740 v1->ticket[2], v1->ticket[3], in rxrpc_preparse()
741 v1->ticket[4], v1->ticket[5], in rxrpc_preparse()
[all …]
rxkad.c  658 iov[2].iov_base = (void *) s2->ticket; in rxkad_send_response()
820 void *ticket, size_t ticket_len, in rxkad_decrypt_ticket() argument
852 ASSERTCMP((unsigned long) ticket & 7UL, ==, 0); in rxkad_decrypt_ticket()
860 sg_init_one(&sg[0], ticket, ticket_len); in rxkad_decrypt_ticket()
863 p = ticket; in rxkad_decrypt_ticket()
1007 void *ticket; in rxkad_verify_response() local
1040 ticket = kmalloc(ticket_len, GFP_NOFS); in rxkad_verify_response()
1041 if (!ticket) in rxkad_verify_response()
1045 if (skb_copy_bits(skb, 0, ticket, ticket_len) < 0) in rxkad_verify_response()
1048 ret = rxkad_decrypt_ticket(conn, ticket, ticket_len, &session_key, in rxkad_verify_response()
[all …]
/linux-4.4.14/include/keys/
rxrpc-type.h  35 u8 ticket[0]; /* the encrypted ticket */ member
80 u8 *ticket; /* krb5 ticket */ member
107 u8 ticket[0]; member
/linux-4.4.14/drivers/gpu/drm/virtio/
virtgpu_ioctl.c  55 static int virtio_gpu_object_list_validate(struct ww_acquire_ctx *ticket, in virtio_gpu_object_list_validate() argument
63 ret = ttm_eu_reserve_buffers(ticket, head, true, NULL); in virtio_gpu_object_list_validate()
72 ttm_eu_backoff_reservation(ticket, head); in virtio_gpu_object_list_validate()
107 struct ww_acquire_ctx ticket; in virtio_gpu_execbuffer() local
152 ret = virtio_gpu_object_list_validate(&ticket, &validate_list); in virtio_gpu_execbuffer()
170 ttm_eu_fence_buffer_objects(&ticket, &validate_list, &fence->f); in virtio_gpu_execbuffer()
179 ttm_eu_backoff_reservation(&ticket, &validate_list); in virtio_gpu_execbuffer()
235 struct ww_acquire_ctx ticket; in virtio_gpu_resource_create_ioctl() local
280 ret = virtio_gpu_object_list_validate(&ticket, &validate_list); in virtio_gpu_resource_create_ioctl()
301 ttm_eu_backoff_reservation(&ticket, &validate_list); in virtio_gpu_resource_create_ioctl()
[all …]
/linux-4.4.14/arch/tile/include/asm/
spinlock_64.h  72 u32 ticket = val & (__ARCH_SPIN_NEXT_MASK | __ARCH_SPIN_NEXT_OVERFLOW); in arch_spin_lock() local
73 if (unlikely(arch_spin_current(val) != ticket)) in arch_spin_lock()
74 arch_spin_lock_slow(lock, ticket); in arch_spin_lock()
/linux-4.4.14/drivers/gpu/drm/msm/
msm_gem_submit.c  52 ww_acquire_init(&submit->ticket, &reservation_ww_class); in submit_create()
157 &submit->ticket); in submit_validate_objects()
188 ww_acquire_done(&submit->ticket); in submit_validate_objects()
203 &submit->ticket); in submit_validate_objects()
316 ww_acquire_fini(&submit->ticket); in submit_cleanup()
msm_gem.h  101 struct ww_acquire_ctx ticket; member
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
amdgpu_gem.c  452 struct ww_acquire_ctx ticket; in amdgpu_gem_va_update_vm() local
469 r = ttm_eu_reserve_buffers(&ticket, &list, true, &duplicates); in amdgpu_gem_va_update_vm()
500 ttm_eu_backoff_reservation(&ticket, &list); in amdgpu_gem_va_update_vm()
521 struct ww_acquire_ctx ticket; in amdgpu_gem_va_ioctl() local
570 r = ttm_eu_reserve_buffers(&ticket, &list, true, &duplicates); in amdgpu_gem_va_ioctl()
578 ttm_eu_backoff_reservation(&ticket, &list); in amdgpu_gem_va_ioctl()
601 ttm_eu_backoff_reservation(&ticket, &list); in amdgpu_gem_va_ioctl()
amdgpu_uvd.c  844 struct ww_acquire_ctx ticket; in amdgpu_uvd_send_msg() local
858 r = ttm_eu_reserve_buffers(&ticket, &head, true, NULL); in amdgpu_uvd_send_msg()
897 ttm_eu_fence_buffer_objects(&ticket, &head, f); in amdgpu_uvd_send_msg()
914 ttm_eu_backoff_reservation(&ticket, &head); in amdgpu_uvd_send_msg()
amdgpu_cs.c  419 r = ttm_eu_reserve_buffers(&p->ticket, &p->validated, true, &duplicates); in amdgpu_cs_parser_relocs()
431 ttm_eu_backoff_reservation(&p->ticket, &p->validated); in amdgpu_cs_parser_relocs()
490 ttm_eu_fence_buffer_objects(&parser->ticket, in amdgpu_cs_parser_fini()
494 ttm_eu_backoff_reservation(&parser->ticket, in amdgpu_cs_parser_fini()
amdgpu.h  1262 struct ww_acquire_ctx ticket; member
/linux-4.4.14/arch/mips/include/asm/octeon/
cvmx-cmd-queue.h  154 uint64_t ticket[(CVMX_CMD_QUEUE_END >> 16) * 256]; member
278 …[ticket_ptr] "=" GCC_OFF_SMALL_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index… in __cvmx_cmd_queue_lock()
279 [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), in __cvmx_cmd_queue_lock()
/linux-4.4.14/drivers/gpu/drm/qxl/
qxl_release.c  267 ret = ttm_eu_reserve_buffers(&release->ticket, &release->bos, in qxl_release_reserve_list()
277 ttm_eu_backoff_reservation(&release->ticket, &release->bos); in qxl_release_reserve_list()
291 ttm_eu_backoff_reservation(&release->ticket, &release->bos); in qxl_release_backoff_reserve_list()
477 ww_acquire_fini(&release->ticket); in qxl_release_fence_buffer_objects()
qxl_drv.h  198 struct ww_acquire_ctx ticket; member
/linux-4.4.14/arch/x86/include/asm/
spinlock.h  58 __ticket_t ticket) in __ticket_lock_spinning() argument
62 __ticket_t ticket) in __ticket_unlock_kick() argument
paravirt.h  717 __ticket_t ticket) in __ticket_lock_spinning() argument
719 PVOP_VCALLEE2(pv_lock_ops.lock_spinning, lock, ticket); in __ticket_lock_spinning()
723 __ticket_t ticket) in __ticket_unlock_kick() argument
725 PVOP_VCALL2(pv_lock_ops.unlock_kick, lock, ticket); in __ticket_unlock_kick()
paravirt_types.h  350 void (*unlock_kick)(struct arch_spinlock *lock, __ticket_t ticket);
/linux-4.4.14/drivers/gpu/drm/nouveau/
nouveau_gem.c  328 struct ww_acquire_ctx ticket; member
352 ttm_bo_unreserve_ticket(&nvbo->bo, &op->ticket); in validate_fini_no_ticket()
362 ww_acquire_fini(&op->ticket); in validate_fini()
379 ww_acquire_init(&op->ticket, &reservation_ww_class); in validate_init()
412 ret = ttm_bo_reserve(&nvbo->bo, true, false, true, &op->ticket); in validate_init()
420 &op->ticket); in validate_init()
454 ww_acquire_done(&op->ticket); in validate_init()
/linux-4.4.14/arch/ia64/mm/
tlb.c  104 unsigned long ticket; member
110 ss->ticket = 0; in spinaphore_init()
116 unsigned long t = ia64_fetchadd(1, &ss->ticket, acq), serve; in down_spin()
/linux-4.4.14/arch/mips/cavium-octeon/executive/
cvmx-cmd-queue.c  192 ticket[__cvmx_cmd_queue_get_index(queue_id)] = 0; in cvmx_cmd_queue_initialize()
/linux-4.4.14/drivers/gpu/drm/radeon/
radeon_object.h  144 struct ww_acquire_ctx *ticket,
radeon_gem.c  544 struct ww_acquire_ctx ticket; in radeon_gem_va_update_vm() local
559 r = ttm_eu_reserve_buffers(&ticket, &list, true, NULL); in radeon_gem_va_update_vm()
583 ttm_eu_backoff_reservation(&ticket, &list); in radeon_gem_va_update_vm()
radeon_object.c  521 struct ww_acquire_ctx *ticket, in radeon_bo_list_validate() argument
531 r = ttm_eu_reserve_buffers(ticket, head, true, &duplicates); in radeon_bo_list_validate()
575 ttm_eu_backoff_reservation(ticket, head); in radeon_bo_list_validate()
radeon_cs.c  182 r = radeon_bo_list_validate(p->rdev, &p->ticket, &p->validated, p->ring); in radeon_cs_parser_relocs()
413 ttm_eu_fence_buffer_objects(&parser->ticket, in radeon_cs_parser_fini()
417 ttm_eu_backoff_reservation(&parser->ticket, in radeon_cs_parser_fini()
radeon.h  1100 struct ww_acquire_ctx ticket; member
/linux-4.4.14/Documentation/locking/
ww-mutex-design.txt  28 to be locked, the caller would be assigned a unique reservation id/ticket,
30 associated with a execbuf, the one with the lowest reservation ticket (i.e.
48 acquired when starting the lock acquisition. This ticket is stored in the
85 better to avoid grabbing a deadlock avoidance ticket).
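The ww-mutex-design.txt lines above describe how wait/wound mutexes use an acquire context as the reservation ticket: when two tasks try to lock overlapping sets of buffers, the one holding the younger ticket gets -EDEADLK, drops everything it holds, and retries once the contended lock is free. A condensed sketch of that acquire sequence for two locks, using the in-kernel ww_mutex API (example_ww_class and the fixed two-lock structure are illustrative; real callers loop until no further -EDEADLK is seen):

	#include <linux/ww_mutex.h>

	static DEFINE_WW_CLASS(example_ww_class);

	static void example_lock_pair(struct ww_mutex *a, struct ww_mutex *b)
	{
		struct ww_acquire_ctx ctx;
		int ret;

		ww_acquire_init(&ctx, &example_ww_class);	/* draw our ticket */

		ret = ww_mutex_lock(a, &ctx);
		if (ret == -EDEADLK)
			/* Nothing held yet: just wait for the older ticket. */
			ww_mutex_lock_slow(a, &ctx);

		ret = ww_mutex_lock(b, &ctx);
		if (ret == -EDEADLK) {
			/* Back off, sleep on the contended lock, then retake 'a'.
			 * (Simplified: a real caller retries the whole sequence.) */
			ww_mutex_unlock(a);
			ww_mutex_lock_slow(b, &ctx);
			ww_mutex_lock(a, &ctx);
		}

		ww_acquire_done(&ctx);		/* all locks acquired */

		/* ... operate on the objects protected by a and b ... */

		ww_mutex_unlock(a);
		ww_mutex_unlock(b);
		ww_acquire_fini(&ctx);
	}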
/linux-4.4.14/arch/x86/kernel/
kvm.c  837 static void kvm_unlock_kick(struct arch_spinlock *lock, __ticket_t ticket) in kvm_unlock_kick() argument
845 READ_ONCE(w->want) == ticket) { in kvm_unlock_kick()
/linux-4.4.14/security/keys/
Kconfig  46 (for example Kerberos ticket caches). The data may be stored out to
/linux-4.4.14/drivers/gpu/drm/vmwgfx/
vmwgfx_execbuf.c  3907 struct ww_acquire_ctx ticket; in vmw_execbuf_process() local
3999 ret = ttm_eu_reserve_buffers(&ticket, &sw_context->validate_nodes, in vmw_execbuf_process()
4051 ttm_eu_fence_buffer_objects(&ticket, &sw_context->validate_nodes, in vmw_execbuf_process()
4085 ttm_eu_backoff_reservation(&ticket, &sw_context->validate_nodes); in vmw_execbuf_process()
4168 struct ww_acquire_ctx ticket; in __vmw_execbuf_release_pinned_bo() local
4183 ret = ttm_eu_reserve_buffers(&ticket, &validate_list, in __vmw_execbuf_release_pinned_bo()
4210 ttm_eu_fence_buffer_objects(&ticket, &validate_list, (void *) fence); in __vmw_execbuf_release_pinned_bo()
4224 ttm_eu_backoff_reservation(&ticket, &validate_list); in __vmw_execbuf_release_pinned_bo()
/linux-4.4.14/Documentation/networking/
rxrpc.txt  455 uint16_t ticket_length; /* length of ticket[] */
460 uint8_t ticket[0]; /* the encrypted ticket */
463 Where the ticket blob is just appended to the above structure.
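The rxrpc.txt fragment above documents the rxkad token layout: a fixed header carrying ticket_length, with the encrypted ticket bytes appended as a flexible array, matching the u8 ticket[0] member in rxrpc-type.h earlier in this list. A small user-space sketch of building such an appended blob (the struct and helper names are illustrative, not the kernel's):

	#include <stdint.h>
	#include <stdlib.h>
	#include <string.h>

	struct example_token {
		uint16_t ticket_length;	/* length of ticket[] */
		uint8_t  ticket[];	/* encrypted ticket, appended to the header */
	};

	static struct example_token *example_token_build(const uint8_t *blob,
							 uint16_t len)
	{
		/* One allocation holds both the header and the appended bytes. */
		struct example_token *tok = malloc(sizeof(*tok) + len);

		if (!tok)
			return NULL;
		tok->ticket_length = len;
		memcpy(tok->ticket, blob, len);
		return tok;
	}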
/linux-4.4.14/Documentation/filesystems/
afs.txt  203 Currently the username, realm, password and proposed ticket lifetime are
/linux-4.4.14/Documentation/hwmon/
w83781d  423 ticket #2350:
/linux-4.4.14/Documentation/security/
keys.txt  426 ID and a colon, such as "krb5tgt:" for a Kerberos 5 ticket granting
427 ticket.
1108 filesystem might want to define a Kerberos 5 ticket key type. To do this, it