resv 496 arch/sparc/include/asm/hypervisor.h unsigned long resv; resv 44 arch/sparc/include/asm/vio.h u64 resv[6]; resv 71 arch/sparc/include/asm/vio.h u16 resv; resv 79 arch/sparc/include/asm/vio.h u64 resv[5]; resv 179 arch/sparc/include/asm/vio.h u32 resv; resv 200 arch/sparc/include/asm/vio.h u16 resv; resv 242 arch/sparc/include/asm/vio.h u32 resv; resv 55 arch/sparc/kernel/mdesc.c u16 resv; resv 101 arch/sparc/kernel/pci_fire.c u64 resv[6]; resv 2174 arch/sparc/mm/init_64.c ktsb_descr[0].resv = 0; resv 2191 arch/sparc/mm/init_64.c ktsb_descr[1].resv = 0; resv 322 arch/sparc/mm/tsb.c hp->resv = 0; resv 801 drivers/acpi/arm64/iort.c int i, resv = 0; resv 841 drivers/acpi/arm64/iort.c resv++; resv 846 drivers/acpi/arm64/iort.c return (resv == its->its_count) ? resv : -ENODEV; resv 186 drivers/crypto/chelsio/chcr_core.h u16 resv; resv 107 drivers/dma-buf/dma-buf.c if (dmabuf->resv == (struct dma_resv *)&dmabuf[1]) resv 108 drivers/dma-buf/dma-buf.c dma_resv_fini(dmabuf->resv); resv 199 drivers/dma-buf/dma-buf.c struct dma_resv *resv; resv 206 drivers/dma-buf/dma-buf.c if (!dmabuf || !dmabuf->resv) resv 209 drivers/dma-buf/dma-buf.c resv = dmabuf->resv; resv 218 drivers/dma-buf/dma-buf.c seq = read_seqcount_begin(&resv->seq); resv 221 drivers/dma-buf/dma-buf.c fobj = rcu_dereference(resv->fence); resv 226 drivers/dma-buf/dma-buf.c fence_excl = rcu_dereference(resv->fence_excl); resv 227 drivers/dma-buf/dma-buf.c if (read_seqcount_retry(&resv->seq, seq)) { resv 511 drivers/dma-buf/dma-buf.c struct dma_resv *resv = exp_info->resv; resv 516 drivers/dma-buf/dma-buf.c if (!exp_info->resv) resv 548 drivers/dma-buf/dma-buf.c if (!resv) { resv 549 drivers/dma-buf/dma-buf.c resv = (struct dma_resv *)&dmabuf[1]; resv 550 drivers/dma-buf/dma-buf.c dma_resv_init(resv); resv 552 drivers/dma-buf/dma-buf.c dmabuf->resv = resv; resv 914 drivers/dma-buf/dma-buf.c struct dma_resv *resv = dmabuf->resv; resv 918 drivers/dma-buf/dma-buf.c ret = dma_resv_wait_timeout_rcu(resv, write, true, resv 1192 drivers/dma-buf/dma-buf.c robj = buf_obj->resv; resv 284 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c bp.resv = NULL; resv 365 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c bp.resv = NULL; resv 221 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct dma_resv *resv = bo->tbo.base.resv; resv 228 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c old = dma_resv_get_list(resv); resv 244 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c dma_resv_held(resv)); resv 256 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c write_seqcount_begin(&resv->seq); resv 257 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c RCU_INIT_POINTER(resv->fence, new); resv 258 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c write_seqcount_end(&resv->seq); resv 266 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c dma_resv_held(resv)); resv 815 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c sync, pd->tbo.base.resv, resv 890 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = dma_resv_reserve_shared(vm->root.base.bo->tbo.base.resv, 1); resv 1175 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c bp.resv = NULL; resv 2137 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = dma_resv_reserve_shared(gws_bo->tbo.base.resv, 1); resv 89 drivers/gpu/drm/amd/amdgpu/amdgpu_benchmark.c bp.resv = NULL; resv 405 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c .resv = bo->tbo.base.resv, resv 729 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c struct dma_resv *resv = bo->tbo.base.resv; resv 731 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c r = amdgpu_sync_resv(p->adev, &p->job->sync, 
resv, p->filp, resv 1731 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c if (dma_resv_locking_ctx((*bo)->tbo.base.resv) != &parser->ticket) resv 208 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c r = dma_resv_get_fences_rcu(new_abo->tbo.base.resv, &work->excl, resv 219 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c r = __dma_resv_make_exclusive(bo->tbo.base.resv); resv 370 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c struct dma_resv *resv = attach->dmabuf->resv; resv 382 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c bp.resv = resv; resv 383 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c dma_resv_lock(resv, NULL); resv 395 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c dma_resv_unlock(resv); resv 399 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c dma_resv_unlock(resv); resv 128 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c bp.resv = NULL; resv 53 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct dma_resv *resv, resv 66 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c bp.resv = resv; resv 137 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c abo->tbo.base.resv != vm->root.base.bo->tbo.base.resv) resv 218 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c struct dma_resv *resv = NULL; resv 255 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c resv = vm->root.base.bo->tbo.base.resv; resv 260 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c flags, ttm_bo_type_device, resv, &gobj); resv 438 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c ret = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, resv 50 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.h struct dma_resv *resv, resv 107 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c void amdgpu_pasid_free_delayed(struct dma_resv *resv, resv 115 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c r = dma_resv_get_fences_rcu(resv, NULL, &count, &fences); resv 159 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c dma_resv_wait_timeout_rcu(resv, true, false, resv 75 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.h void amdgpu_pasid_free_delayed(struct dma_resv *resv, resv 1079 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c amdgpu_pasid_free_delayed(pd->tbo.base.resv, pasid); resv 182 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, resv 253 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c bp.resv = NULL; resv 516 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c .resv = bp->resv, resv 577 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c NULL, bp->resv, &amdgpu_bo_destroy); resv 593 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence); resv 602 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c if (!bp->resv) resv 615 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c if (!bp->resv) resv 616 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_unlock(bo->tbo.base.resv); resv 637 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c bp.resv = bo->tbo.base.resv; resv 677 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c if (!bp->resv) resv 678 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c WARN_ON(dma_resv_lock((*bo_ptr)->tbo.base.resv, resv 683 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c if (!bp->resv) resv 684 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_unlock((*bo_ptr)->tbo.base.resv); resv 781 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, false, false, resv 1159 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_assert_held(bo->tbo.base.resv); resv 1308 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_lock(bo->base.resv, NULL); resv 1310 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c r = amdgpu_fill_buffer(abo, AMDGPU_POISON, bo->base.resv, &fence); resv 1316 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 
dma_resv_unlock(bo->base.resv); resv 1391 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c struct dma_resv *resv = bo->tbo.base.resv; resv 1394 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_add_shared_fence(resv, fence); resv 1396 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c dma_resv_add_excl_fence(resv, fence); resv 1416 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c amdgpu_sync_resv(adev, &sync, bo->tbo.base.resv, owner, false); resv 1436 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c WARN_ON_ONCE(!dma_resv_is_locked(bo->tbo.base.resv) && resv 44 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h struct dma_resv *resv; resv 193 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c struct dma_resv *resv, resv 202 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c if (resv == NULL) resv 206 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c f = dma_resv_get_excl(resv); resv 209 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c flist = dma_resv_get_list(resv); resv 215 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c dma_resv_held(resv)); resv 47 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.h struct dma_resv *resv, resv 69 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c bp.resv = NULL; resv 307 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_resv *resv, resv 379 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c resv, &next, false, true); resv 444 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c bo->base.resv, &fence); resv 1487 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c !dma_resv_test_signaled_rcu(bo->base.resv, true)) resv 1494 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c flist = dma_resv_get_list(bo->base.resv); resv 1498 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_resv_held(bo->base.resv)); resv 1965 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_resv *resv, resv 1998 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c if (resv) { resv 1999 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = amdgpu_sync_resv(adev, &job->sync, resv, resv 2039 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct dma_resv *resv, resv 2083 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c if (resv) { resv 2084 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = amdgpu_sync_resv(adev, &job->sync, resv, resv 88 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct dma_resv *resv, resv 95 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct dma_resv *resv, resv 99 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct dma_resv *resv, resv 1076 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, resv 1088 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c r = amdgpu_sync_resv(adev, &job->sync, bo->tbo.base.resv, resv 305 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo->tbo.base.resv != vm->root.base.bo->tbo.base.resv) resv 586 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (abo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) resv 835 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c bp->resv = vm->root.base.bo->tbo.base.resv; resv 1709 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c exclusive = dma_resv_get_excl(bo->tbo.base.resv); resv 1719 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (clear || (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv)) resv 1750 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) { resv 1886 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_resv *resv = vm->root.base.bo->tbo.base.resv; resv 1891 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = dma_resv_get_fences_rcu(resv, &excl, resv 1897 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_resv_wait_timeout_rcu(resv, true, false, resv 1985 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct dma_resv *resv; resv 2000 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 
resv = bo_va->base.bo->tbo.base.resv; resv 2004 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!amdgpu_vm_debug && dma_resv_trylock(resv)) resv 2015 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dma_resv_unlock(resv); resv 2091 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv && resv 2423 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (dma_resv_locking_ctx(bo->tbo.base.resv) != resv 2451 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) resv 2515 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (evicted && bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) { resv 2526 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c else if (bo->tbo.base.resv == vm->root.base.bo->tbo.base.resv) resv 2656 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return dma_resv_wait_timeout_rcu(vm->root.base.bo->tbo.base.resv, resv 2731 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = dma_resv_reserve_shared(root->tbo.base.resv, 1); resv 75 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c r = amdgpu_sync_resv(p->adev, &p->job->sync, root->tbo.base.resv, resv 5766 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c r = dma_resv_wait_timeout_rcu(abo->tbo.base.resv, true, resv 163 drivers/gpu/drm/drm_gem.c if (!obj->resv) resv 164 drivers/gpu/drm/drm_gem.c obj->resv = &obj->_resv; resv 782 drivers/gpu/drm/drm_gem.c ret = dma_resv_wait_timeout_rcu(obj->resv, wait_all, resv 1294 drivers/gpu/drm/drm_gem.c ret = dma_resv_lock_slow_interruptible(obj->resv, resv 1306 drivers/gpu/drm/drm_gem.c ret = dma_resv_lock_interruptible(objs[i]->resv, resv 1312 drivers/gpu/drm/drm_gem.c dma_resv_unlock(objs[j]->resv); resv 1315 drivers/gpu/drm/drm_gem.c dma_resv_unlock(objs[contended]->resv); resv 1340 drivers/gpu/drm/drm_gem.c dma_resv_unlock(objs[i]->resv); resv 1416 drivers/gpu/drm/drm_gem.c dma_resv_get_excl_rcu(obj->resv); resv 1421 drivers/gpu/drm/drm_gem.c ret = dma_resv_get_fences_rcu(obj->resv, NULL, resv 297 drivers/gpu/drm/drm_gem_framebuffer_helper.c fence = dma_resv_get_excl_rcu(obj->resv); resv 836 drivers/gpu/drm/drm_prime.c .resv = obj->resv, resv 900 drivers/gpu/drm/drm_prime.c obj->resv = dma_buf->resv; resv 394 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (!dma_resv_test_signaled_rcu(obj->resv, resv 400 drivers/gpu/drm/etnaviv/etnaviv_gem.c ret = dma_resv_wait_timeout_rcu(obj->resv, resv 456 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct dma_resv *robj = obj->resv; resv 116 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ww_mutex_unlock(&obj->resv->lock); resv 136 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ret = ww_mutex_lock_interruptible(&obj->resv->lock, resv 164 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ret = ww_mutex_lock_slow_interruptible(&obj->resv->lock, resv 182 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct dma_resv *robj = bo->obj->base.resv; resv 216 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c dma_resv_add_excl_fence(obj->resv, resv 219 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c dma_resv_add_shared_fence(obj->resv, resv 14394 drivers/gpu/drm/i915/display/intel_display.c old_obj->base.resv, NULL, resv 14438 drivers/gpu/drm/i915/display/intel_display.c obj->base.resv, NULL, resv 14444 drivers/gpu/drm/i915/display/intel_display.c fence = dma_resv_get_excl_rcu(obj->base.resv); resv 113 drivers/gpu/drm/i915/gem/i915_gem_busy.c seq = raw_read_seqcount(&obj->base.resv->seq); resv 117 drivers/gpu/drm/i915/gem/i915_gem_busy.c busy_check_writer(rcu_dereference(obj->base.resv->fence_excl)); resv 120 drivers/gpu/drm/i915/gem/i915_gem_busy.c list = 
rcu_dereference(obj->base.resv->fence); resv 132 drivers/gpu/drm/i915/gem/i915_gem_busy.c if (args->busy && read_seqcount_retry(&obj->base.resv->seq, seq)) resv 112 drivers/gpu/drm/i915/gem/i915_gem_clflush.c obj->base.resv, NULL, true, resv 115 drivers/gpu/drm/i915/gem/i915_gem_clflush.c dma_resv_add_excl_fence(obj->base.resv, &clflush->base.dma); resv 297 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c obj->base.resv, NULL, resv 303 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c dma_resv_add_excl_fence(obj->base.resv, &work->dma); resv 216 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c exp_info.resv = obj->base.resv; resv 292 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c obj->base.resv = dma_buf->resv; resv 1271 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c !dma_resv_test_signaled_rcu(vma->resv, true))) { resv 1819 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = ww_mutex_lock_interruptible(&vma->resv->lock, &acquire); resv 1830 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c ww_mutex_unlock(&eb->vma[j]->resv->lock); resv 1839 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = ww_mutex_lock_slow_interruptible(&vma->resv->lock, resv 75 drivers/gpu/drm/i915/gem/i915_gem_fence.c obj->base.resv, NULL, resv 80 drivers/gpu/drm/i915/gem/i915_gem_fence.c dma_resv_add_excl_fence(obj->base.resv, &stub->dma); resv 102 drivers/gpu/drm/i915/gem/i915_gem_object.h #define assert_object_held(obj) dma_resv_assert_held((obj)->base.resv) resv 106 drivers/gpu/drm/i915/gem/i915_gem_object.h dma_resv_lock(obj->base.resv, NULL); resv 112 drivers/gpu/drm/i915/gem/i915_gem_object.h return dma_resv_lock_interruptible(obj->base.resv, NULL); resv 117 drivers/gpu/drm/i915/gem/i915_gem_object.h dma_resv_unlock(obj->base.resv); resv 376 drivers/gpu/drm/i915/gem/i915_gem_object.h fence = dma_resv_get_excl_rcu(obj->base.resv); resv 34 drivers/gpu/drm/i915/gem/i915_gem_wait.c i915_gem_object_wait_reservation(struct dma_resv *resv, resv 46 drivers/gpu/drm/i915/gem/i915_gem_wait.c ret = dma_resv_get_fences_rcu(resv, resv 75 drivers/gpu/drm/i915/gem/i915_gem_wait.c excl = dma_resv_get_excl_rcu(resv); resv 87 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (prune_fences && dma_resv_trylock(resv)) { resv 88 drivers/gpu/drm/i915/gem/i915_gem_wait.c if (dma_resv_test_signaled_rcu(resv, true)) resv 89 drivers/gpu/drm/i915/gem/i915_gem_wait.c dma_resv_add_excl_fence(resv, NULL); resv 90 drivers/gpu/drm/i915/gem/i915_gem_wait.c dma_resv_unlock(resv); resv 143 drivers/gpu/drm/i915/gem/i915_gem_wait.c ret = dma_resv_get_fences_rcu(obj->base.resv, resv 155 drivers/gpu/drm/i915/gem/i915_gem_wait.c excl = dma_resv_get_excl_rcu(obj->base.resv); resv 179 drivers/gpu/drm/i915/gem/i915_gem_wait.c timeout = i915_gem_object_wait_reservation(obj->base.resv, resv 424 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c struct drm_mm_node resv, *hole; resv 433 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c memset(&resv, 0, sizeof(resv)); resv 435 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c resv.start = hole_start; resv 436 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c resv.size = hole_end - hole_start - 1; /* PAGE_SIZE units */ resv 438 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c err = drm_mm_reserve_node(mm, &resv); resv 504 drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c drm_mm_remove_node(&resv); resv 46 drivers/gpu/drm/i915/gt/intel_engine_pool.c struct dma_resv *resv = node->obj->base.resv; resv 49 drivers/gpu/drm/i915/gt/intel_engine_pool.c if (dma_resv_trylock(resv)) { resv 50 drivers/gpu/drm/i915/gt/intel_engine_pool.c 
dma_resv_add_excl_fence(resv, NULL); resv 51 drivers/gpu/drm/i915/gt/intel_engine_pool.c dma_resv_unlock(resv); resv 1070 drivers/gpu/drm/i915/i915_request.c ret = dma_resv_get_fences_rcu(obj->base.resv, resv 1087 drivers/gpu/drm/i915/i915_request.c excl = dma_resv_get_excl_rcu(obj->base.resv); resv 526 drivers/gpu/drm/i915/i915_sw_fence.c struct dma_resv *resv, resv 542 drivers/gpu/drm/i915/i915_sw_fence.c ret = dma_resv_get_fences_rcu(resv, resv 567 drivers/gpu/drm/i915/i915_sw_fence.c excl = dma_resv_get_excl_rcu(resv); resv 86 drivers/gpu/drm/i915/i915_sw_fence.h struct dma_resv *resv, resv 116 drivers/gpu/drm/i915/i915_vma.c vma->resv = obj->base.resv; resv 915 drivers/gpu/drm/i915/i915_vma.c dma_resv_add_excl_fence(vma->resv, &rq->fence); resv 919 drivers/gpu/drm/i915/i915_vma.c err = dma_resv_reserve_shared(vma->resv, 1); resv 923 drivers/gpu/drm/i915/i915_vma.c dma_resv_add_shared_fence(vma->resv, &rq->fence); resv 58 drivers/gpu/drm/i915/i915_vma.h struct dma_resv *resv; /** Alias of obj->resv */ resv 309 drivers/gpu/drm/i915/i915_vma.h #define assert_vma_held(vma) dma_resv_assert_held((vma)->resv) resv 313 drivers/gpu/drm/i915/i915_vma.h dma_resv_lock(vma->resv, NULL); resv 318 drivers/gpu/drm/i915/i915_vma.h dma_resv_unlock(vma->resv); resv 139 drivers/gpu/drm/lima/lima_gem.c err = dma_resv_reserve_shared(bo->gem.resv, 1); resv 165 drivers/gpu/drm/lima/lima_gem.c ret = ww_mutex_lock_interruptible(&bos[i]->gem.resv->lock, ctx); resv 177 drivers/gpu/drm/lima/lima_gem.c ww_mutex_unlock(&bos[i]->gem.resv->lock); resv 180 drivers/gpu/drm/lima/lima_gem.c ww_mutex_unlock(&bos[slow_locked]->gem.resv->lock); resv 185 drivers/gpu/drm/lima/lima_gem.c &bos[contended]->gem.resv->lock, ctx); resv 202 drivers/gpu/drm/lima/lima_gem.c ww_mutex_unlock(&bos[i]->gem.resv->lock); resv 299 drivers/gpu/drm/lima/lima_gem.c dma_resv_add_excl_fence(bos[i]->gem.resv, fence); resv 301 drivers/gpu/drm/lima/lima_gem.c dma_resv_add_shared_fence(bos[i]->gem.resv, fence); resv 709 drivers/gpu/drm/msm/msm_gem.c fobj = dma_resv_get_list(obj->resv); resv 711 drivers/gpu/drm/msm/msm_gem.c fence = dma_resv_get_excl(obj->resv); resv 725 drivers/gpu/drm/msm/msm_gem.c dma_resv_held(obj->resv)); resv 743 drivers/gpu/drm/msm/msm_gem.c dma_resv_add_excl_fence(obj->resv, fence); resv 745 drivers/gpu/drm/msm/msm_gem.c dma_resv_add_shared_fence(obj->resv, fence); resv 770 drivers/gpu/drm/msm/msm_gem.c ret = dma_resv_wait_timeout_rcu(obj->resv, write, resv 802 drivers/gpu/drm/msm/msm_gem.c struct dma_resv *robj = obj->resv; resv 161 drivers/gpu/drm/msm/msm_gem_submit.c ww_mutex_unlock(&msm_obj->base.resv->lock); resv 184 drivers/gpu/drm/msm/msm_gem_submit.c ret = ww_mutex_lock_interruptible(&msm_obj->base.resv->lock, resv 206 drivers/gpu/drm/msm/msm_gem_submit.c ret = ww_mutex_lock_slow_interruptible(&msm_obj->base.resv->lock, resv 232 drivers/gpu/drm/msm/msm_gem_submit.c ret = dma_resv_reserve_shared(msm_obj->base.resv, resv 506 drivers/gpu/drm/nouveau/dispnv50/wndw.c asyw->state.fence = dma_resv_get_excl_rcu(fb->nvbo->bo.base.resv); resv 1356 drivers/gpu/drm/nouveau/nouveau_bo.c struct dma_fence *fence = dma_resv_get_excl(bo->base.resv); resv 1687 drivers/gpu/drm/nouveau/nouveau_bo.c struct dma_resv *resv = nvbo->bo.base.resv; resv 1690 drivers/gpu/drm/nouveau/nouveau_bo.c dma_resv_add_excl_fence(resv, &fence->base); resv 1692 drivers/gpu/drm/nouveau/nouveau_bo.c dma_resv_add_shared_fence(resv, &fence->base); resv 337 drivers/gpu/drm/nouveau/nouveau_fence.c struct dma_resv *resv = nvbo->bo.base.resv; resv 343 
drivers/gpu/drm/nouveau/nouveau_fence.c ret = dma_resv_reserve_shared(resv, 1); resv 349 drivers/gpu/drm/nouveau/nouveau_fence.c fobj = dma_resv_get_list(resv); resv 350 drivers/gpu/drm/nouveau/nouveau_fence.c fence = dma_resv_get_excl(resv); resv 379 drivers/gpu/drm/nouveau/nouveau_fence.c dma_resv_held(resv)); resv 892 drivers/gpu/drm/nouveau/nouveau_gem.c lret = dma_resv_wait_timeout_rcu(nvbo->bo.base.resv, write, true, resv 65 drivers/gpu/drm/nouveau/nouveau_prime.c struct dma_resv *robj = attach->dmabuf->resv; resv 315 drivers/gpu/drm/panfrost/panfrost_drv.c ret = dma_resv_wait_timeout_rcu(gem_obj->resv, true, resv 197 drivers/gpu/drm/panfrost/panfrost_job.c implicit_fences[i] = dma_resv_get_excl_rcu(bos[i]->resv); resv 207 drivers/gpu/drm/panfrost/panfrost_job.c dma_resv_add_excl_fence(bos[i]->resv, fence); resv 64 drivers/gpu/drm/qxl/qxl_debugfs.c fobj = rcu_dereference(bo->tbo.base.resv->fence); resv 241 drivers/gpu/drm/qxl/qxl_release.c ret = dma_resv_reserve_shared(bo->tbo.base.resv, 1); resv 461 drivers/gpu/drm/qxl/qxl_release.c dma_resv_add_shared_fence(bo->base.resv, &release->base); resv 463 drivers/gpu/drm/qxl/qxl_release.c dma_resv_unlock(bo->base.resv); resv 3662 drivers/gpu/drm/radeon/cik.c struct dma_resv *resv) resv 3683 drivers/gpu/drm/radeon/cik.c radeon_sync_resv(rdev, &sync, resv, false); resv 582 drivers/gpu/drm/radeon/cik_sdma.c struct dma_resv *resv) resv 603 drivers/gpu/drm/radeon/cik_sdma.c radeon_sync_resv(rdev, &sync, resv, false); resv 111 drivers/gpu/drm/radeon/evergreen_dma.c struct dma_resv *resv) resv 132 drivers/gpu/drm/radeon/evergreen_dma.c radeon_sync_resv(rdev, &sync, resv, false); resv 894 drivers/gpu/drm/radeon/r100.c struct dma_resv *resv) resv 87 drivers/gpu/drm/radeon/r200.c struct dma_resv *resv) resv 2966 drivers/gpu/drm/radeon/r600.c struct dma_resv *resv) resv 2987 drivers/gpu/drm/radeon/r600.c radeon_sync_resv(rdev, &sync, resv, false); resv 447 drivers/gpu/drm/radeon/r600_dma.c struct dma_resv *resv) resv 468 drivers/gpu/drm/radeon/r600_dma.c radeon_sync_resv(rdev, &sync, resv, false); resv 622 drivers/gpu/drm/radeon/radeon.h struct dma_resv *resv, resv 1915 drivers/gpu/drm/radeon/radeon.h struct dma_resv *resv); resv 1921 drivers/gpu/drm/radeon/radeon.h struct dma_resv *resv); resv 1928 drivers/gpu/drm/radeon/radeon.h struct dma_resv *resv); resv 2734 drivers/gpu/drm/radeon/radeon.h #define radeon_copy_blit(rdev, s, d, np, resv) (rdev)->asic->copy.blit((rdev), (s), (d), (np), (resv)) resv 2735 drivers/gpu/drm/radeon/radeon.h #define radeon_copy_dma(rdev, s, d, np, resv) (rdev)->asic->copy.dma((rdev), (s), (d), (np), (resv)) resv 2736 drivers/gpu/drm/radeon/radeon.h #define radeon_copy(rdev, s, d, np, resv) (rdev)->asic->copy.copy((rdev), (s), (d), (np), (resv)) resv 89 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 160 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 350 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 354 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 476 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 550 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 728 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 799 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 803 drivers/gpu/drm/radeon/radeon_asic.h struct dma_resv *resv); resv 38 drivers/gpu/drm/radeon/radeon_benchmark.c struct dma_resv *resv) resv 51 drivers/gpu/drm/radeon/radeon_benchmark.c resv); resv 56 
drivers/gpu/drm/radeon/radeon_benchmark.c resv); resv 125 drivers/gpu/drm/radeon/radeon_benchmark.c dobj->tbo.base.resv); resv 136 drivers/gpu/drm/radeon/radeon_benchmark.c dobj->tbo.base.resv); resv 258 drivers/gpu/drm/radeon/radeon_cs.c struct dma_resv *resv; resv 260 drivers/gpu/drm/radeon/radeon_cs.c resv = reloc->robj->tbo.base.resv; resv 261 drivers/gpu/drm/radeon/radeon_cs.c r = radeon_sync_resv(p->rdev, &p->ib.sync, resv, resv 538 drivers/gpu/drm/radeon/radeon_display.c work->fence = dma_fence_get(dma_resv_get_excl(new_rbo->tbo.base.resv)); resv 117 drivers/gpu/drm/radeon/radeon_gem.c r = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, 30 * HZ); resv 454 drivers/gpu/drm/radeon/radeon_gem.c r = dma_resv_test_signaled_rcu(robj->tbo.base.resv, true); resv 483 drivers/gpu/drm/radeon/radeon_gem.c ret = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, 30 * HZ); resv 108 drivers/gpu/drm/radeon/radeon_mn.c r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, resv 186 drivers/gpu/drm/radeon/radeon_object.c struct dma_resv *resv, resv 264 drivers/gpu/drm/radeon/radeon_object.c sg, resv, &radeon_ttm_bo_destroy); resv 613 drivers/gpu/drm/radeon/radeon_object.c dma_resv_assert_held(bo->tbo.base.resv); resv 739 drivers/gpu/drm/radeon/radeon_object.c dma_resv_assert_held(bo->tbo.base.resv); resv 751 drivers/gpu/drm/radeon/radeon_object.c dma_resv_assert_held(bo->tbo.base.resv); resv 873 drivers/gpu/drm/radeon/radeon_object.c struct dma_resv *resv = bo->tbo.base.resv; resv 876 drivers/gpu/drm/radeon/radeon_object.c dma_resv_add_shared_fence(resv, &fence->base); resv 878 drivers/gpu/drm/radeon/radeon_object.c dma_resv_add_excl_fence(resv, &fence->base); resv 129 drivers/gpu/drm/radeon/radeon_object.h struct dma_resv *resv, resv 66 drivers/gpu/drm/radeon/radeon_prime.c struct dma_resv *resv = attach->dmabuf->resv; resv 71 drivers/gpu/drm/radeon/radeon_prime.c dma_resv_lock(resv, NULL); resv 73 drivers/gpu/drm/radeon/radeon_prime.c RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo); resv 74 drivers/gpu/drm/radeon/radeon_prime.c dma_resv_unlock(resv); resv 90 drivers/gpu/drm/radeon/radeon_sync.c struct dma_resv *resv, resv 100 drivers/gpu/drm/radeon/radeon_sync.c f = dma_resv_get_excl(resv); resv 107 drivers/gpu/drm/radeon/radeon_sync.c flist = dma_resv_get_list(resv); resv 113 drivers/gpu/drm/radeon/radeon_sync.c dma_resv_held(resv)); resv 123 drivers/gpu/drm/radeon/radeon_test.c vram_obj->tbo.base.resv); resv 127 drivers/gpu/drm/radeon/radeon_test.c vram_obj->tbo.base.resv); resv 174 drivers/gpu/drm/radeon/radeon_test.c vram_obj->tbo.base.resv); resv 178 drivers/gpu/drm/radeon/radeon_test.c vram_obj->tbo.base.resv); resv 247 drivers/gpu/drm/radeon/radeon_ttm.c fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->base.resv); resv 480 drivers/gpu/drm/radeon/radeon_uvd.c f = dma_resv_get_excl(bo->tbo.base.resv); resv 705 drivers/gpu/drm/radeon/radeon_vm.c radeon_sync_resv(rdev, &ib.sync, pd->tbo.base.resv, true); resv 833 drivers/gpu/drm/radeon/radeon_vm.c radeon_sync_resv(rdev, &ib->sync, pt->tbo.base.resv, true); resv 834 drivers/gpu/drm/radeon/radeon_vm.c r = dma_resv_reserve_shared(pt->tbo.base.resv, 1); resv 45 drivers/gpu/drm/radeon/rv770_dma.c struct dma_resv *resv) resv 66 drivers/gpu/drm/radeon/rv770_dma.c radeon_sync_resv(rdev, &sync, resv, false); resv 234 drivers/gpu/drm/radeon/si_dma.c struct dma_resv *resv) resv 255 drivers/gpu/drm/radeon/si_dma.c radeon_sync_resv(rdev, &sync, resv, false); resv 176 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 
248 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 281 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(pos->first->base.resv); resv 282 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(pos->last->base.resv); resv 296 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(pos->first->base.resv); resv 297 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(pos->last->base.resv); resv 311 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(pos->first->base.resv); resv 312 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(pos->last->base.resv); resv 443 drivers/gpu/drm/ttm/ttm_bo.c if (bo->base.resv == &bo->base._resv) resv 448 drivers/gpu/drm/ttm/ttm_bo.c r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv); resv 468 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_held(bo->base.resv)); resv 486 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_wait_timeout_rcu(bo->base.resv, true, false, resv 493 drivers/gpu/drm/ttm/ttm_bo.c ret = dma_resv_trylock(bo->base.resv) ? 0 : -EBUSY; resv 498 drivers/gpu/drm/ttm/ttm_bo.c if (bo->base.resv != &bo->base._resv) resv 502 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 518 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 520 drivers/gpu/drm/ttm/ttm_bo.c if (bo->base.resv != &bo->base._resv) { resv 552 drivers/gpu/drm/ttm/ttm_bo.c struct dma_resv *resv; resv 556 drivers/gpu/drm/ttm/ttm_bo.c resv = bo->base.resv; resv 558 drivers/gpu/drm/ttm/ttm_bo.c resv = &bo->base._resv; resv 560 drivers/gpu/drm/ttm/ttm_bo.c if (dma_resv_test_signaled_rcu(resv, true)) resv 569 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 572 drivers/gpu/drm/ttm/ttm_bo.c lret = dma_resv_wait_timeout_rcu(resv, true, resv 582 drivers/gpu/drm/ttm/ttm_bo.c if (unlock_resv && !dma_resv_trylock(bo->base.resv)) { resv 599 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 612 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 638 drivers/gpu/drm/ttm/ttm_bo.c if (remove_all || bo->base.resv != &bo->base._resv) { resv 640 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_lock(bo->base.resv, NULL); resv 645 drivers/gpu/drm/ttm/ttm_bo.c } else if (dma_resv_trylock(bo->base.resv)) { resv 717 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 787 drivers/gpu/drm/ttm/ttm_bo.c if (bo->base.resv == ctx->resv) { resv 788 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 796 drivers/gpu/drm/ttm/ttm_bo.c ret = dma_resv_trylock(bo->base.resv); resv 824 drivers/gpu/drm/ttm/ttm_bo.c r = dma_resv_lock_interruptible(busy_bo->base.resv, resv 827 drivers/gpu/drm/ttm/ttm_bo.c r = dma_resv_lock(busy_bo->base.resv, ticket); resv 835 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(busy_bo->base.resv); resv 861 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_locking_ctx(bo->base.resv)) resv 869 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 947 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_add_shared_fence(bo->base.resv, fence); resv 949 drivers/gpu/drm/ttm/ttm_bo.c ret = dma_resv_reserve_shared(bo->base.resv, 1); resv 974 drivers/gpu/drm/ttm/ttm_bo.c ticket = dma_resv_locking_ctx(bo->base.resv); resv 1104 drivers/gpu/drm/ttm/ttm_bo.c ret = dma_resv_reserve_shared(bo->base.resv, 1); resv 1189 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 1259 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 1296 drivers/gpu/drm/ttm/ttm_bo.c struct dma_resv *resv, resv 1348 drivers/gpu/drm/ttm/ttm_bo.c if (resv) { resv 1349 drivers/gpu/drm/ttm/ttm_bo.c bo->base.resv = resv; resv 
1350 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_assert_held(bo->base.resv); resv 1352 drivers/gpu/drm/ttm/ttm_bo.c bo->base.resv = &bo->base._resv; resv 1376 drivers/gpu/drm/ttm/ttm_bo.c if (!resv) { resv 1377 drivers/gpu/drm/ttm/ttm_bo.c locked = dma_resv_trylock(bo->base.resv); resv 1385 drivers/gpu/drm/ttm/ttm_bo.c if (!resv) resv 1392 drivers/gpu/drm/ttm/ttm_bo.c if (resv && !(bo->mem.placement & TTM_PL_FLAG_NO_EVICT)) { resv 1411 drivers/gpu/drm/ttm/ttm_bo.c struct dma_resv *resv, resv 1419 drivers/gpu/drm/ttm/ttm_bo.c sg, resv, destroy); resv 1423 drivers/gpu/drm/ttm/ttm_bo.c if (!resv) resv 1821 drivers/gpu/drm/ttm/ttm_bo.c if (dma_resv_test_signaled_rcu(bo->base.resv, true)) resv 1827 drivers/gpu/drm/ttm/ttm_bo.c timeout = dma_resv_wait_timeout_rcu(bo->base.resv, true, resv 1835 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_add_excl_fence(bo->base.resv, NULL); resv 1951 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 1989 drivers/gpu/drm/ttm/ttm_bo.c if (!dma_resv_is_locked(bo->base.resv)) resv 1991 drivers/gpu/drm/ttm/ttm_bo.c ret = dma_resv_lock_interruptible(bo->base.resv, NULL); resv 1996 drivers/gpu/drm/ttm/ttm_bo.c dma_resv_unlock(bo->base.resv); resv 520 drivers/gpu/drm/ttm/ttm_bo_util.c fbo->base.base.resv = &fbo->base.base._resv; resv 521 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_init(fbo->base.base.resv); resv 522 drivers/gpu/drm/ttm/ttm_bo_util.c ret = dma_resv_trylock(fbo->base.base.resv); resv 692 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(bo->base.resv, fence); resv 719 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(ghost_obj->base.resv, fence); resv 755 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(bo->base.resv, fence); resv 775 drivers/gpu/drm/ttm/ttm_bo_util.c dma_resv_add_excl_fence(ghost_obj->base.resv, fence); resv 844 drivers/gpu/drm/ttm/ttm_bo_util.c ret = dma_resv_copy_fences(ghost->base.resv, bo->base.resv); resv 74 drivers/gpu/drm/ttm/ttm_bo_vm.c dma_resv_unlock(bo->base.resv); resv 134 drivers/gpu/drm/ttm/ttm_bo_vm.c if (unlikely(!dma_resv_trylock(bo->base.resv))) { resv 297 drivers/gpu/drm/ttm/ttm_bo_vm.c dma_resv_unlock(bo->base.resv); resv 42 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_unlock(bo->base.resv); resv 74 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_unlock(bo->base.resv); resv 117 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_unlock(bo->base.resv); resv 133 drivers/gpu/drm/ttm/ttm_execbuf_util.c ret = dma_resv_reserve_shared(bo->base.resv, resv 147 drivers/gpu/drm/ttm/ttm_execbuf_util.c ret = dma_resv_lock_slow_interruptible(bo->base.resv, resv 150 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_lock_slow(bo->base.resv, ticket); resv 156 drivers/gpu/drm/ttm/ttm_execbuf_util.c ret = dma_resv_reserve_shared(bo->base.resv, resv 204 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_add_shared_fence(bo->base.resv, fence); resv 206 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_add_excl_fence(bo->base.resv, fence); resv 211 drivers/gpu/drm/ttm/ttm_execbuf_util.c dma_resv_unlock(bo->base.resv); resv 51 drivers/gpu/drm/ttm/ttm_tt.c dma_resv_assert_held(bo->base.resv); resv 498 drivers/gpu/drm/v3d/v3d_gem.c dma_resv_add_excl_fence(job->bo[i]->resv, resv 546 drivers/gpu/drm/vc4/vc4_gem.c dma_resv_add_shared_fence(bo->base.base.resv, exec->fence); resv 557 drivers/gpu/drm/vc4/vc4_gem.c dma_resv_add_excl_fence(bo->base.base.resv, exec->fence); resv 571 drivers/gpu/drm/vc4/vc4_gem.c ww_mutex_unlock(&bo->resv->lock); resv 598 drivers/gpu/drm/vc4/vc4_gem.c ret = 
ww_mutex_lock_slow_interruptible(&bo->resv->lock, resv 612 drivers/gpu/drm/vc4/vc4_gem.c ret = ww_mutex_lock_interruptible(&bo->resv->lock, acquire_ctx); resv 618 drivers/gpu/drm/vc4/vc4_gem.c ww_mutex_unlock(&bo->resv->lock); resv 624 drivers/gpu/drm/vc4/vc4_gem.c ww_mutex_unlock(&bo->resv->lock); resv 645 drivers/gpu/drm/vc4/vc4_gem.c ret = dma_resv_reserve_shared(bo->resv, 1); resv 131 drivers/gpu/drm/vgem/vgem_fence.c struct dma_resv *resv; resv 153 drivers/gpu/drm/vgem/vgem_fence.c resv = obj->resv; resv 154 drivers/gpu/drm/vgem/vgem_fence.c if (!dma_resv_test_signaled_rcu(resv, resv 162 drivers/gpu/drm/vgem/vgem_fence.c dma_resv_lock(resv, NULL); resv 164 drivers/gpu/drm/vgem/vgem_fence.c dma_resv_add_excl_fence(resv, fence); resv 165 drivers/gpu/drm/vgem/vgem_fence.c else if ((ret = dma_resv_reserve_shared(resv, 1)) == 0) resv 166 drivers/gpu/drm/vgem/vgem_fence.c dma_resv_add_shared_fence(resv, fence); resv 167 drivers/gpu/drm/vgem/vgem_fence.c dma_resv_unlock(resv); resv 399 drivers/gpu/drm/virtio/virtgpu_ioctl.c dma_resv_add_excl_fence(qobj->tbo.base.resv, resv 453 drivers/gpu/drm/virtio/virtgpu_ioctl.c dma_resv_add_excl_fence(qobj->tbo.base.resv, resv 215 drivers/gpu/drm/virtio/virtgpu_plane.c dma_resv_add_excl_fence(bo->tbo.base.resv, resv 462 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dma_resv_assert_held(dst->base.resv); resv 464 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dma_resv_assert_held(src->base.resv); resv 344 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c dma_resv_assert_held(bo->base.resv); resv 694 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c (bo->base.resv, true, true, resv 1011 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c dma_resv_add_excl_fence(bo->base.resv, &fence->base); resv 1014 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c dma_resv_add_excl_fence(bo->base.resv, &fence->base); resv 174 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c dma_resv_assert_held(bo->base.resv); resv 316 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c dma_resv_assert_held(bo->base.resv); resv 44 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c dma_resv_assert_held(res->backup->base.base.resv); resv 59 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c dma_resv_assert_held(backup->base.base.resv); resv 722 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c dma_resv_assert_held(vbo->base.base.resv); resv 128 drivers/infiniband/hw/qib/qib_mad.h __be16 resv; resv 47 drivers/input/mouse/cyapa_gen6.c u8 resv; /* Reserved, must be 0 */ resv 54 drivers/input/mouse/cyapa_gen6.c u8 resv; /* Reserved, must be 0 */ resv 141 drivers/iommu/amd_iommu_init.c u64 resv; resv 5679 drivers/iommu/intel-iommu.c struct iommu_resv_region *resv; resv 5692 drivers/iommu/intel-iommu.c resv = iommu_alloc_resv_region(rmrr->base_address, resv 5694 drivers/iommu/intel-iommu.c if (!resv) resv 5697 drivers/iommu/intel-iommu.c list_add_tail(&resv->list, head); resv 141 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_cmd.h u8 resv; resv 113 drivers/net/ethernet/netronome/nfp/bpf/fw.h __be32 resv; resv 29 drivers/net/ethernet/netronome/nfp/crypto/fw.h u8 resv[3]; resv 78 drivers/net/ethernet/netronome/nfp/crypto/fw.h u8 resv[3]; resv 321 drivers/net/ethernet/netronome/nfp/crypto/tls.c memset(front->resv, 0, sizeof(front->resv)); resv 440 drivers/net/ethernet/netronome/nfp/crypto/tls.c memset(req->resv, 0, sizeof(req->resv)); resv 455 drivers/net/ethernet/netronome/nfp/nfp_net.h u8 resv; resv 459 drivers/net/ethernet/netronome/nfp/nfp_net.h u8 resv, u8 class, u8 major, u8 minor) resv 461 drivers/net/ethernet/netronome/nfp/nfp_net.h return fw_ver->resv == resv && resv 3771 
drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->fw_ver.resv, nn->fw_ver.class, resv 199 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c nn->fw_ver.resv, nn->fw_ver.class, resv 675 drivers/net/ethernet/netronome/nfp/nfp_net_main.c if (fw_ver.resv || fw_ver.class != NFP_NET_CFG_VERSION_CLASS_GENERIC) { resv 677 drivers/net/ethernet/netronome/nfp/nfp_net_main.c fw_ver.resv, fw_ver.class, fw_ver.major, fw_ver.minor); resv 693 drivers/net/ethernet/netronome/nfp/nfp_net_main.c fw_ver.resv, fw_ver.class, resv 119 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c if (fw_ver.resv || fw_ver.class != NFP_NET_CFG_VERSION_CLASS_GENERIC) { resv 121 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c fw_ver.resv, fw_ver.class, fw_ver.major, fw_ver.minor); resv 141 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c fw_ver.resv, fw_ver.class, resv 95 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_hwinfo.c __le32 resv; resv 76 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c u8 resv[2]; resv 106 drivers/net/ethernet/rocker/rocker_hw.h u16 resv[5]; resv 6654 drivers/net/ethernet/sun/niu.c tp->resv = 0; resv 2822 drivers/net/ethernet/sun/niu.h __le64 resv; resv 41 drivers/nvme/host/lightnvm.c __le64 resv; resv 56 drivers/nvme/host/lightnvm.c __le64 resv; resv 158 drivers/nvme/host/lightnvm.c __u8 resv[228]; resv 182 drivers/nvme/host/lightnvm.c __u8 resv[4]; resv 188 drivers/nvme/host/lightnvm.c __u8 resv[6]; resv 1047 drivers/scsi/bfa/bfa_defs_svc.h u8 resv[2]; resv 1249 drivers/scsi/ipr.h u8 resv; resv 6309 drivers/scsi/lpfc/lpfc_els.c switch (rscn_did.un.b.resv & RSCN_ADDRESS_FORMAT_MASK) { resv 1024 drivers/scsi/lpfc/lpfc_hw.h uint8_t resv; resv 1032 drivers/scsi/lpfc/lpfc_hw.h uint8_t resv; resv 4790 drivers/scsi/qla2xxx/qla_def.h u8 resv; resv 1558 drivers/vfio/vfio_iommu_type1.c struct iommu_resv_region *resv; resv 1561 drivers/vfio/vfio_iommu_type1.c list_for_each_entry(resv, resv_regions, list) { resv 1564 drivers/vfio/vfio_iommu_type1.c if (resv->type == IOMMU_RESV_DIRECT_RELAXABLE) resv 1567 drivers/vfio/vfio_iommu_type1.c start = resv->start; resv 1568 drivers/vfio/vfio_iommu_type1.c end = resv->start + resv->length - 1; resv 3988 fs/io_uring.c for (i = 0; i < ARRAY_SIZE(p.resv); i++) { resv 3989 fs/io_uring.c if (p.resv[i]) resv 495 fs/nfsd/nfscache.c struct kvec *resv = &rqstp->rq_res.head[0], *cachv; resv 507 fs/nfsd/nfscache.c len = resv->iov_len - ((char*)statp - (char*)resv->iov_base); resv 42 fs/ocfs2/localalloc.c struct ocfs2_alloc_reservation *resv); resv 827 fs/ocfs2/localalloc.c struct ocfs2_alloc_reservation *resv) resv 840 fs/ocfs2/localalloc.c if (!resv) { resv 844 fs/ocfs2/localalloc.c resv = &r; resv 848 fs/ocfs2/localalloc.c if (ocfs2_resmap_resv_bits(resmap, resv, &bitoff, &numfound) == 0) { resv 903 fs/ocfs2/localalloc.c ocfs2_resv_discard(resmap, resv); resv 45 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv) resv 50 fs/ocfs2/reservations.c if (!(resv->r_flags & OCFS2_RESV_FLAG_DIR)) { resv 59 fs/ocfs2/reservations.c static inline unsigned int ocfs2_resv_end(struct ocfs2_alloc_reservation *resv) resv 61 fs/ocfs2/reservations.c if (resv->r_len) resv 62 fs/ocfs2/reservations.c return resv->r_start + resv->r_len - 1; resv 63 fs/ocfs2/reservations.c return resv->r_start; resv 66 fs/ocfs2/reservations.c static inline int ocfs2_resv_empty(struct ocfs2_alloc_reservation *resv) resv 68 fs/ocfs2/reservations.c return !!(resv->r_len == 0); resv 82 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv; resv 90 fs/ocfs2/reservations.c resv = 
rb_entry(node, struct ocfs2_alloc_reservation, r_node); resv 93 fs/ocfs2/reservations.c "\tlast_len: %u\n", resv->r_start, resv 94 fs/ocfs2/reservations.c ocfs2_resv_end(resv), resv->r_len, resv->r_last_start, resv 95 fs/ocfs2/reservations.c resv->r_last_len); resv 104 fs/ocfs2/reservations.c list_for_each_entry(resv, &resmap->m_lru, r_lru) { resv 106 fs/ocfs2/reservations.c "last_start: %u\tlast_len: %u\n", i, resv->r_start, resv 107 fs/ocfs2/reservations.c ocfs2_resv_end(resv), resv->r_len, resv->r_last_start, resv 108 fs/ocfs2/reservations.c resv->r_last_len); resv 117 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv) resv 120 fs/ocfs2/reservations.c unsigned int start = resv->r_start; resv 121 fs/ocfs2/reservations.c unsigned int end = ocfs2_resv_end(resv); resv 141 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv; resv 145 fs/ocfs2/reservations.c resv = rb_entry(node, struct ocfs2_alloc_reservation, r_node); resv 147 fs/ocfs2/reservations.c if (i > 0 && resv->r_start <= off) { resv 153 fs/ocfs2/reservations.c if (resv->r_len == 0) { resv 159 fs/ocfs2/reservations.c if (resv->r_start > ocfs2_resv_end(resv)) { resv 165 fs/ocfs2/reservations.c if (ocfs2_resv_end(resv) >= resmap->m_bitmap_len) { resv 171 fs/ocfs2/reservations.c if (ocfs2_validate_resmap_bits(resmap, i, resv)) resv 174 fs/ocfs2/reservations.c off = ocfs2_resv_end(resv); resv 192 fs/ocfs2/reservations.c void ocfs2_resv_init_once(struct ocfs2_alloc_reservation *resv) resv 194 fs/ocfs2/reservations.c memset(resv, 0, sizeof(*resv)); resv 195 fs/ocfs2/reservations.c INIT_LIST_HEAD(&resv->r_lru); resv 198 fs/ocfs2/reservations.c void ocfs2_resv_set_type(struct ocfs2_alloc_reservation *resv, resv 203 fs/ocfs2/reservations.c resv->r_flags |= flags; resv 220 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv) resv 224 fs/ocfs2/reservations.c if (!list_empty(&resv->r_lru)) resv 225 fs/ocfs2/reservations.c list_del_init(&resv->r_lru); resv 227 fs/ocfs2/reservations.c list_add_tail(&resv->r_lru, &resmap->m_lru); resv 230 fs/ocfs2/reservations.c static void __ocfs2_resv_trunc(struct ocfs2_alloc_reservation *resv) resv 232 fs/ocfs2/reservations.c resv->r_len = 0; resv 233 fs/ocfs2/reservations.c resv->r_start = 0; resv 237 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv) resv 239 fs/ocfs2/reservations.c if (resv->r_flags & OCFS2_RESV_FLAG_INUSE) { resv 240 fs/ocfs2/reservations.c list_del_init(&resv->r_lru); resv 241 fs/ocfs2/reservations.c rb_erase(&resv->r_node, &resmap->m_reservations); resv 242 fs/ocfs2/reservations.c resv->r_flags &= ~OCFS2_RESV_FLAG_INUSE; resv 247 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv) resv 251 fs/ocfs2/reservations.c __ocfs2_resv_trunc(resv); resv 256 fs/ocfs2/reservations.c resv->r_last_len = resv->r_last_start = 0; resv 258 fs/ocfs2/reservations.c ocfs2_resv_remove(resmap, resv); resv 263 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv) resv 265 fs/ocfs2/reservations.c if (resv) { resv 267 fs/ocfs2/reservations.c __ocfs2_resv_discard(resmap, resv); resv 275 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv; resv 280 fs/ocfs2/reservations.c resv = rb_entry(node, struct ocfs2_alloc_reservation, r_node); resv 282 fs/ocfs2/reservations.c __ocfs2_resv_discard(resmap, resv); resv 361 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv = NULL; resv 372 fs/ocfs2/reservations.c resv = rb_entry(node, struct ocfs2_alloc_reservation, r_node); resv 374 fs/ocfs2/reservations.c if (resv->r_start <= goal && 
ocfs2_resv_end(resv) >= goal) resv 378 fs/ocfs2/reservations.c if (resv->r_start > goal) { resv 379 fs/ocfs2/reservations.c resv = prev_resv; resv 383 fs/ocfs2/reservations.c prev_resv = resv; resv 387 fs/ocfs2/reservations.c return resv; resv 463 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv, resv 481 fs/ocfs2/reservations.c trace_ocfs2_resv_find_window_begin(resv->r_start, ocfs2_resv_end(resv), resv 504 fs/ocfs2/reservations.c resv->r_start = cstart; resv 505 fs/ocfs2/reservations.c resv->r_len = clen; resv 507 fs/ocfs2/reservations.c ocfs2_resv_insert(resmap, resv); resv 616 fs/ocfs2/reservations.c resv->r_start = best_start; resv 617 fs/ocfs2/reservations.c resv->r_len = best_len; resv 618 fs/ocfs2/reservations.c ocfs2_resv_insert(resmap, resv); resv 623 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv, resv 627 fs/ocfs2/reservations.c int tmpwindow = !!(resv->r_flags & OCFS2_RESV_FLAG_TMP); resv 631 fs/ocfs2/reservations.c min_bits = ocfs2_resv_window_bits(resmap, resv) >> 1; resv 659 fs/ocfs2/reservations.c resv->r_start = lru_resv->r_start; resv 660 fs/ocfs2/reservations.c resv->r_len = lru_resv->r_len; resv 672 fs/ocfs2/reservations.c resv->r_start = ocfs2_resv_end(lru_resv) + 1; resv 673 fs/ocfs2/reservations.c resv->r_len = shrink; resv 676 fs/ocfs2/reservations.c trace_ocfs2_cannibalize_resv_end(resv->r_start, ocfs2_resv_end(resv), resv 677 fs/ocfs2/reservations.c resv->r_len, resv->r_last_start, resv 678 fs/ocfs2/reservations.c resv->r_last_len); resv 680 fs/ocfs2/reservations.c ocfs2_resv_insert(resmap, resv); resv 684 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv, resv 689 fs/ocfs2/reservations.c BUG_ON(!ocfs2_resv_empty(resv)); resv 696 fs/ocfs2/reservations.c if (resv->r_last_len) { resv 697 fs/ocfs2/reservations.c goal = resv->r_last_start + resv->r_last_len; resv 702 fs/ocfs2/reservations.c __ocfs2_resv_find_window(resmap, resv, goal, wanted); resv 705 fs/ocfs2/reservations.c if (ocfs2_resv_empty(resv) && goal != 0) resv 706 fs/ocfs2/reservations.c __ocfs2_resv_find_window(resmap, resv, 0, wanted); resv 708 fs/ocfs2/reservations.c if (ocfs2_resv_empty(resv)) { resv 713 fs/ocfs2/reservations.c ocfs2_cannibalize_resv(resmap, resv, wanted); resv 716 fs/ocfs2/reservations.c BUG_ON(ocfs2_resv_empty(resv)); resv 720 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv, resv 723 fs/ocfs2/reservations.c if (resv == NULL || ocfs2_resmap_disabled(resmap)) resv 728 fs/ocfs2/reservations.c if (ocfs2_resv_empty(resv)) { resv 734 fs/ocfs2/reservations.c unsigned int wanted = ocfs2_resv_window_bits(resmap, resv); resv 736 fs/ocfs2/reservations.c if ((resv->r_flags & OCFS2_RESV_FLAG_TMP) || wanted < *clen) resv 746 fs/ocfs2/reservations.c ocfs2_resv_find_window(resmap, resv, wanted); resv 747 fs/ocfs2/reservations.c trace_ocfs2_resmap_resv_bits(resv->r_start, resv->r_len); resv 750 fs/ocfs2/reservations.c BUG_ON(ocfs2_resv_empty(resv)); resv 752 fs/ocfs2/reservations.c *cstart = resv->r_start; resv 753 fs/ocfs2/reservations.c *clen = resv->r_len; resv 761 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation *resv, resv 765 fs/ocfs2/reservations.c unsigned int old_end = ocfs2_resv_end(resv); resv 767 fs/ocfs2/reservations.c BUG_ON(start != resv->r_start || old_end < end); resv 773 fs/ocfs2/reservations.c __ocfs2_resv_discard(resmap, resv); resv 784 fs/ocfs2/reservations.c resv->r_start = end + 1; resv 785 fs/ocfs2/reservations.c resv->r_len = old_end - resv->r_start + 1; resv 789 fs/ocfs2/reservations.c struct ocfs2_alloc_reservation 
*resv, resv 797 fs/ocfs2/reservations.c if (resv == NULL) resv 800 fs/ocfs2/reservations.c BUG_ON(cstart != resv->r_start); resv 804 fs/ocfs2/reservations.c trace_ocfs2_resmap_claimed_bits_begin(cstart, cend, clen, resv->r_start, resv 805 fs/ocfs2/reservations.c ocfs2_resv_end(resv), resv->r_len, resv 806 fs/ocfs2/reservations.c resv->r_last_start, resv 807 fs/ocfs2/reservations.c resv->r_last_len); resv 809 fs/ocfs2/reservations.c BUG_ON(cstart < resv->r_start); resv 810 fs/ocfs2/reservations.c BUG_ON(cstart > ocfs2_resv_end(resv)); resv 811 fs/ocfs2/reservations.c BUG_ON(cend > ocfs2_resv_end(resv)); resv 813 fs/ocfs2/reservations.c ocfs2_adjust_resv_from_alloc(resmap, resv, cstart, cend); resv 814 fs/ocfs2/reservations.c resv->r_last_start = cstart; resv 815 fs/ocfs2/reservations.c resv->r_last_len = clen; resv 821 fs/ocfs2/reservations.c if (!ocfs2_resv_empty(resv)) resv 822 fs/ocfs2/reservations.c ocfs2_resv_mark_lru(resmap, resv); resv 824 fs/ocfs2/reservations.c trace_ocfs2_resmap_claimed_bits_end(resv->r_start, ocfs2_resv_end(resv), resv 825 fs/ocfs2/reservations.c resv->r_len, resv->r_last_start, resv 826 fs/ocfs2/reservations.c resv->r_last_len); resv 56 fs/ocfs2/reservations.h void ocfs2_resv_init_once(struct ocfs2_alloc_reservation *resv); resv 59 fs/ocfs2/reservations.h void ocfs2_resv_set_type(struct ocfs2_alloc_reservation *resv, resv 73 fs/ocfs2/reservations.h struct ocfs2_alloc_reservation *resv); resv 129 fs/ocfs2/reservations.h struct ocfs2_alloc_reservation *resv, resv 148 fs/ocfs2/reservations.h struct ocfs2_alloc_reservation *resv, resv 131 fs/xfs/libxfs/xfs_ag_resv.c struct xfs_ag_resv *resv; resv 137 fs/xfs/libxfs/xfs_ag_resv.c resv = xfs_perag_resv(pag, type); resv 139 fs/xfs/libxfs/xfs_ag_resv.c pag->pag_mount->m_ag_max_usable += resv->ar_asked; resv 146 fs/xfs/libxfs/xfs_ag_resv.c oldresv = resv->ar_orig_reserved; resv 148 fs/xfs/libxfs/xfs_ag_resv.c oldresv = resv->ar_reserved; resv 150 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_reserved = 0; resv 151 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_asked = 0; resv 152 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_orig_reserved = 0; resv 183 fs/xfs/libxfs/xfs_ag_resv.c struct xfs_ag_resv *resv; resv 231 fs/xfs/libxfs/xfs_ag_resv.c resv = xfs_perag_resv(pag, type); resv 232 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_asked = ask; resv 233 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_orig_reserved = hidden_space; resv 234 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_reserved = ask - used; resv 324 fs/xfs/libxfs/xfs_ag_resv.c struct xfs_ag_resv *resv; resv 335 fs/xfs/libxfs/xfs_ag_resv.c resv = xfs_perag_resv(pag, type); resv 347 fs/xfs/libxfs/xfs_ag_resv.c len = min_t(xfs_extlen_t, args->len, resv->ar_reserved); resv 348 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_reserved -= len; resv 368 fs/xfs/libxfs/xfs_ag_resv.c struct xfs_ag_resv *resv; resv 377 fs/xfs/libxfs/xfs_ag_resv.c resv = xfs_perag_resv(pag, type); resv 387 fs/xfs/libxfs/xfs_ag_resv.c leftover = min_t(xfs_extlen_t, len, resv->ar_asked - resv->ar_reserved); resv 388 fs/xfs/libxfs/xfs_ag_resv.c resv->ar_reserved += leftover; resv 735 fs/xfs/libxfs/xfs_alloc.c args->resv == XFS_AG_RESV_AGFL || resv 843 fs/xfs/libxfs/xfs_alloc.c ASSERT(!args->wasfromfl || args->resv != XFS_AG_RESV_AGFL); resv 865 fs/xfs/libxfs/xfs_alloc.c xfs_ag_resv_alloc_extent(args->pag, args->resv, args); resv 2041 fs/xfs/libxfs/xfs_alloc.c reservation = xfs_ag_resv_needed(pag, args->resv); resv 2345 fs/xfs/libxfs/xfs_alloc.c targs.resv = XFS_AG_RESV_AGFL; resv 78 fs/xfs/libxfs/xfs_alloc.h enum xfs_ag_resv_type resv; /* block reservation 
to use */ resv 3555 fs/xfs/libxfs/xfs_bmap.c args.resv = XFS_AG_RESV_NONE; resv 76 fs/xfs/libxfs/xfs_ialloc_btree.c enum xfs_ag_resv_type resv) resv 91 fs/xfs/libxfs/xfs_ialloc_btree.c args.resv = resv; resv 135 fs/xfs/libxfs/xfs_ialloc_btree.c enum xfs_ag_resv_type resv) resv 139 fs/xfs/libxfs/xfs_ialloc_btree.c &XFS_RMAP_OINFO_INOBT, resv); resv 73 fs/xfs/libxfs/xfs_refcount_btree.c args.resv = XFS_AG_RESV_METADATA; resv 287 fs/xfs/scrub/repair.c enum xfs_ag_resv_type resv) resv 293 fs/xfs/scrub/repair.c switch (resv) { resv 304 fs/xfs/scrub/repair.c if (resv == XFS_AG_RESV_RMAPBT) resv 319 fs/xfs/scrub/repair.c args.resv = resv; resv 524 fs/xfs/scrub/repair.c enum xfs_ag_resv_type resv) resv 573 fs/xfs/scrub/repair.c else if (resv == XFS_AG_RESV_AGFL) resv 576 fs/xfs/scrub/repair.c error = xfs_free_extent(sc->tp, fsbno, 1, oinfo, resv); resv 26 fs/xfs/scrub/repair.h enum xfs_ag_resv_type resv); resv 1533 fs/xfs/xfs_trace.h xfs_extlen_t len, enum xfs_ag_resv_type resv, int haveleft, resv 1535 fs/xfs/xfs_trace.h TP_ARGS(mp, agno, agbno, len, resv, haveleft, haveright), resv 1541 fs/xfs/xfs_trace.h __field(int, resv) resv 1550 fs/xfs/xfs_trace.h __entry->resv = resv; resv 1559 fs/xfs/xfs_trace.h __entry->resv, resv 1586 fs/xfs/xfs_trace.h __field(int, resv) resv 1607 fs/xfs/xfs_trace.h __entry->resv = args->resv; resv 1631 fs/xfs/xfs_trace.h __entry->resv, resv 2577 fs/xfs/xfs_trace.h TP_PROTO(struct xfs_perag *pag, enum xfs_ag_resv_type resv, resv 2579 fs/xfs/xfs_trace.h TP_ARGS(pag, resv, len), resv 2583 fs/xfs/xfs_trace.h __field(int, resv) resv 2591 fs/xfs/xfs_trace.h struct xfs_ag_resv *r = xfs_perag_resv(pag, resv); resv 2595 fs/xfs/xfs_trace.h __entry->resv = resv; resv 2606 fs/xfs/xfs_trace.h __entry->resv, resv 30 fs/xfs/xfs_trans.c struct xfs_trans_res resv; resv 39 fs/xfs/xfs_trans.c xfs_log_get_max_trans_res(mp, &resv); resv 40 fs/xfs/xfs_trans.c trace_xfs_trans_resv_calc(mp, -1, &resv); resv 315 fs/xfs/xfs_trans.c struct xfs_trans_res resv = {0}; resv 317 fs/xfs/xfs_trans.c return xfs_trans_alloc(mp, &resv, 0, 0, XFS_TRANS_NO_WRITECOUNT, tpp); resv 279 include/drm/drm_gem.h struct dma_resv *resv; resv 276 include/drm/ttm/ttm_bo_api.h struct dma_resv *resv; resv 529 include/drm/ttm/ttm_bo_api.h struct dma_resv *resv, resv 573 include/drm/ttm/ttm_bo_api.h struct sg_table *sg, struct dma_resv *resv, resv 667 include/drm/ttm/ttm_bo_driver.h success = dma_resv_trylock(bo->base.resv); resv 672 include/drm/ttm/ttm_bo_driver.h ret = dma_resv_lock_interruptible(bo->base.resv, ticket); resv 674 include/drm/ttm/ttm_bo_driver.h ret = dma_resv_lock(bo->base.resv, ticket); resv 758 include/drm/ttm/ttm_bo_driver.h ret = dma_resv_lock_slow_interruptible(bo->base.resv, resv 761 include/drm/ttm/ttm_bo_driver.h dma_resv_lock_slow(bo->base.resv, ticket); resv 786 include/drm/ttm/ttm_bo_driver.h dma_resv_unlock(bo->base.resv); resv 309 include/linux/dma-buf.h struct dma_resv *resv; resv 368 include/linux/dma-buf.h struct dma_resv *resv; resv 1018 include/linux/mlx4/device.h __be16 resv; resv 448 include/linux/nvme.h __u8 resv[2048]; resv 1250 include/linux/nvme.h __u8 resv[3]; resv 173 include/rdma/ib_mad.h __be16 resv; resv 55 include/rdma/ib_smi.h __be16 resv; resv 118 include/rdma/ib_smi.h u8 resv; resv 59 include/rdma/opa_smi.h __be16 resv; resv 71 include/uapi/linux/igmp.h resv:4; resv 73 include/uapi/linux/igmp.h __u8 resv:4, resv 115 include/uapi/linux/io_uring.h __u64 resv[2]; resv 134 include/uapi/linux/io_uring.h __u32 resv[4]; resv 442 include/uapi/rdma/mlx5-abi.h __u32 resv; resv 499 
kernel/irq/affinity.c unsigned int resv = affd->pre_vectors + affd->post_vectors; resv 502 kernel/irq/affinity.c if (resv > minvec) resv 506 kernel/irq/affinity.c set_vecs = maxvec - resv; resv 513 kernel/irq/affinity.c return resv + min(set_vecs, maxvec - resv); resv 262 mm/hugetlb.c static long region_add(struct resv_map *resv, long f, long t) resv 264 mm/hugetlb.c struct list_head *head = &resv->regions; resv 268 mm/hugetlb.c spin_lock(&resv->lock); resv 281 mm/hugetlb.c VM_BUG_ON(resv->region_cache_count <= 0); resv 283 mm/hugetlb.c resv->region_cache_count--; resv 284 mm/hugetlb.c nrg = list_first_entry(&resv->region_cache, struct file_region, resv 330 mm/hugetlb.c resv->adds_in_progress--; resv 331 mm/hugetlb.c spin_unlock(&resv->lock); resv 358 mm/hugetlb.c static long region_chg(struct resv_map *resv, long f, long t) resv 360 mm/hugetlb.c struct list_head *head = &resv->regions; resv 365 mm/hugetlb.c spin_lock(&resv->lock); resv 367 mm/hugetlb.c resv->adds_in_progress++; resv 373 mm/hugetlb.c if (resv->adds_in_progress > resv->region_cache_count) { resv 376 mm/hugetlb.c VM_BUG_ON(resv->adds_in_progress - resv->region_cache_count > 1); resv 378 mm/hugetlb.c resv->adds_in_progress--; resv 379 mm/hugetlb.c spin_unlock(&resv->lock); resv 387 mm/hugetlb.c spin_lock(&resv->lock); resv 388 mm/hugetlb.c list_add(&trg->link, &resv->region_cache); resv 389 mm/hugetlb.c resv->region_cache_count++; resv 403 mm/hugetlb.c resv->adds_in_progress--; resv 404 mm/hugetlb.c spin_unlock(&resv->lock); resv 443 mm/hugetlb.c spin_unlock(&resv->lock); resv 448 mm/hugetlb.c spin_unlock(&resv->lock); resv 463 mm/hugetlb.c static void region_abort(struct resv_map *resv, long f, long t) resv 465 mm/hugetlb.c spin_lock(&resv->lock); resv 466 mm/hugetlb.c VM_BUG_ON(!resv->region_cache_count); resv 467 mm/hugetlb.c resv->adds_in_progress--; resv 468 mm/hugetlb.c spin_unlock(&resv->lock); resv 485 mm/hugetlb.c static long region_del(struct resv_map *resv, long f, long t) resv 487 mm/hugetlb.c struct list_head *head = &resv->regions; resv 493 mm/hugetlb.c spin_lock(&resv->lock); resv 514 mm/hugetlb.c resv->region_cache_count > resv->adds_in_progress) { resv 515 mm/hugetlb.c nrg = list_first_entry(&resv->region_cache, resv 519 mm/hugetlb.c resv->region_cache_count--; resv 523 mm/hugetlb.c spin_unlock(&resv->lock); resv 561 mm/hugetlb.c spin_unlock(&resv->lock); resv 592 mm/hugetlb.c static long region_count(struct resv_map *resv, long f, long t) resv 594 mm/hugetlb.c struct list_head *head = &resv->regions; resv 598 mm/hugetlb.c spin_lock(&resv->lock); resv 614 mm/hugetlb.c spin_unlock(&resv->lock); resv 1989 mm/hugetlb.c struct resv_map *resv; resv 1993 mm/hugetlb.c resv = vma_resv_map(vma); resv 1994 mm/hugetlb.c if (!resv) resv 2000 mm/hugetlb.c ret = region_chg(resv, idx, idx + 1); resv 2003 mm/hugetlb.c ret = region_add(resv, idx, idx + 1); resv 2006 mm/hugetlb.c region_abort(resv, idx, idx + 1); resv 2011 mm/hugetlb.c ret = region_add(resv, idx, idx + 1); resv 2013 mm/hugetlb.c region_abort(resv, idx, idx + 1); resv 2014 mm/hugetlb.c ret = region_del(resv, idx, idx + 1); resv 3281 mm/hugetlb.c struct resv_map *resv = vma_resv_map(vma); resv 3291 mm/hugetlb.c if (resv && is_vma_resv_set(vma, HPAGE_RESV_OWNER)) resv 3292 mm/hugetlb.c kref_get(&resv->refs); resv 3298 mm/hugetlb.c struct resv_map *resv = vma_resv_map(vma); resv 3303 mm/hugetlb.c if (!resv || !is_vma_resv_set(vma, HPAGE_RESV_OWNER)) resv 3309 mm/hugetlb.c reserve = (end - start) - region_count(resv, start, end); resv 3311 mm/hugetlb.c 
resv 294 net/bridge/br_multicast.c ihv3->resv = 0;
resv 659 net/sunrpc/auth_gss/svcauth_gss.c svc_safe_putnetobj(struct kvec *resv, struct xdr_netobj *o)
resv 663 net/sunrpc/auth_gss/svcauth_gss.c if (resv->iov_len + 4 > PAGE_SIZE)
resv 665 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, o->len);
resv 666 net/sunrpc/auth_gss/svcauth_gss.c p = resv->iov_base + resv->iov_len;
resv 667 net/sunrpc/auth_gss/svcauth_gss.c resv->iov_len += round_up_to_quad(o->len);
resv 668 net/sunrpc/auth_gss/svcauth_gss.c if (resv->iov_len > PAGE_SIZE)
resv 1143 net/sunrpc/auth_gss/svcauth_gss.c gss_write_resv(struct kvec *resv, size_t size_limit,
resv 1147 net/sunrpc/auth_gss/svcauth_gss.c if (resv->iov_len + 4 > size_limit)
resv 1149 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, RPC_SUCCESS);
resv 1150 net/sunrpc/auth_gss/svcauth_gss.c if (svc_safe_putnetobj(resv, out_handle))
resv 1152 net/sunrpc/auth_gss/svcauth_gss.c if (resv->iov_len + 3 * 4 > size_limit)
resv 1154 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, major_status);
resv 1155 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, minor_status);
resv 1156 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, GSS_SEQ_WIN);
resv 1157 net/sunrpc/auth_gss/svcauth_gss.c if (svc_safe_putnetobj(resv, out_token))
resv 1173 net/sunrpc/auth_gss/svcauth_gss.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 1198 net/sunrpc/auth_gss/svcauth_gss.c if (gss_write_resv(resv, PAGE_SIZE,
resv 1285 net/sunrpc/auth_gss/svcauth_gss.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 1336 net/sunrpc/auth_gss/svcauth_gss.c if (gss_write_resv(resv, PAGE_SIZE,
resv 1482 net/sunrpc/auth_gss/svcauth_gss.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 1488 net/sunrpc/auth_gss/svcauth_gss.c __be32 *reject_stat = resv->iov_base + resv->iov_len;
resv 1568 net/sunrpc/auth_gss/svcauth_gss.c if (resv->iov_len + 4 > PAGE_SIZE)
resv 1570 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, RPC_SUCCESS);
resv 1574 net/sunrpc/auth_gss/svcauth_gss.c svcdata->verf_start = resv->iov_base + resv->iov_len;
resv 1585 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, 0);
resv 1586 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, 0);
resv 1594 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, 0);
resv 1595 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, 0);
resv 1667 net/sunrpc/auth_gss/svcauth_gss.c struct kvec *resv;
resv 1691 net/sunrpc/auth_gss/svcauth_gss.c resv = &resbuf->tail[0];
resv 1692 net/sunrpc/auth_gss/svcauth_gss.c mic.data = (u8 *)resv->iov_base + resv->iov_len + 4;
resv 1695 net/sunrpc/auth_gss/svcauth_gss.c svc_putnl(resv, mic.len);
resv 1698 net/sunrpc/auth_gss/svcauth_gss.c resv->iov_len += XDR_QUADLEN(mic.len) << 2;
resv 1701 net/sunrpc/auth_gss/svcauth_gss.c BUG_ON(resv->iov_len > PAGE_SIZE);
resv 1191 net/sunrpc/svc.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 1218 net/sunrpc/svc.c !procp->pc_encode(rqstp, resv->iov_base + resv->iov_len)) {
resv 1283 net/sunrpc/svc.c svc_process_common(struct svc_rqst *rqstp, struct kvec *argv, struct kvec *resv)
resv 1306 net/sunrpc/svc.c svc_putu32(resv, rqstp->rq_xid);
resv 1311 net/sunrpc/svc.c svc_putnl(resv, 1); /* REPLY */
resv 1317 net/sunrpc/svc.c reply_statp = resv->iov_base + resv->iov_len;
resv 1319 net/sunrpc/svc.c svc_putnl(resv, 0); /* ACCEPT */
resv 1383 net/sunrpc/svc.c statp = resv->iov_base +resv->iov_len;
resv 1384 net/sunrpc/svc.c svc_putnl(resv, RPC_SUCCESS);
resv 1409 net/sunrpc/svc.c resv->iov_len = ((void*)statp) - resv->iov_base + 4;
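Nearly every net/sunrpc hit above uses resv as a write cursor into the head kvec of the pending reply. svc_putnl() is a static inline in include/linux/sunrpc/svc.h that appends one 32-bit XDR word and advances iov_len; the body below is restated from memory, so treat it as approximate, and the helper after it is hypothetical (GSS_SEQ_WIN is local to svcauth_gss.c):

    /* Approximate restatement; do not compile alongside the real header. */
    static inline void svc_putnl(struct kvec *iov, u32 val)
    {
            __be32 *vp = iov->iov_base + iov->iov_len;

            *vp = htonl(val);               /* one big-endian XDR word */
            iov->iov_len += sizeof(__be32); /* advance the write cursor */
    }

    /*
     * Hypothetical helper mirroring the gss_write_resv() word sequence
     * in the hits above (bounds checks and netobj payloads omitted).
     */
    static void sketch_gss_accept(struct kvec *resv, u32 major, u32 minor)
    {
            svc_putnl(resv, RPC_SUCCESS);
            svc_putnl(resv, major);
            svc_putnl(resv, minor);
            svc_putnl(resv, GSS_SEQ_WIN);
    }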
resv 1444 net/sunrpc/svc.c svc_putnl(resv, 1); /* REJECT */
resv 1445 net/sunrpc/svc.c svc_putnl(resv, 0); /* RPC_MISMATCH */
resv 1446 net/sunrpc/svc.c svc_putnl(resv, 2); /* Only RPCv2 supported */
resv 1447 net/sunrpc/svc.c svc_putnl(resv, 2);
resv 1458 net/sunrpc/svc.c svc_putnl(resv, 1); /* REJECT */
resv 1459 net/sunrpc/svc.c svc_putnl(resv, 1); /* AUTH_ERROR */
resv 1460 net/sunrpc/svc.c svc_putnl(resv, ntohl(auth_stat)); /* status */
resv 1466 net/sunrpc/svc.c svc_putnl(resv, RPC_PROG_UNAVAIL);
resv 1474 net/sunrpc/svc.c svc_putnl(resv, RPC_PROG_MISMATCH);
resv 1475 net/sunrpc/svc.c svc_putnl(resv, process.mismatch.lovers);
resv 1476 net/sunrpc/svc.c svc_putnl(resv, process.mismatch.hivers);
resv 1483 net/sunrpc/svc.c svc_putnl(resv, RPC_PROC_UNAVAIL);
resv 1492 net/sunrpc/svc.c svc_putnl(resv, ntohl(rpc_stat));
resv 1503 net/sunrpc/svc.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 1512 net/sunrpc/svc.c resv->iov_base = page_address(rqstp->rq_respages[0]);
resv 1513 net/sunrpc/svc.c resv->iov_len = 0;
resv 1532 net/sunrpc/svc.c svc_putnl(resv, 0);
resv 1535 net/sunrpc/svc.c if (likely(svc_process_common(rqstp, argv, resv)))
resv 1554 net/sunrpc/svc.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 1586 net/sunrpc/svc.c resv->iov_len = 0;
resv 1596 net/sunrpc/svc.c proc_error = svc_process_common(rqstp, argv, resv);
resv 738 net/sunrpc/svcauth_unix.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 763 net/sunrpc/svcauth_unix.c svc_putnl(resv, RPC_AUTH_NULL);
resv 764 net/sunrpc/svcauth_unix.c svc_putnl(resv, 0);
resv 798 net/sunrpc/svcauth_unix.c struct kvec *resv = &rqstp->rq_res.head[0];
resv 842 net/sunrpc/svcauth_unix.c svc_putnl(resv, RPC_AUTH_NULL);
resv 843 net/sunrpc/svcauth_unix.c svc_putnl(resv, 0);
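The closing net/sunrpc/svc.c and svcauth_unix.c hits are the fixed XDR word sequences an RPC reply needs on the reject path and for the NULL verifier. Restated as two hypothetical helpers; the constants and svc_putnl() are real:

    /* Illustrative sketches of the word sequences in the hits above. */
    static void sketch_reject_rpc_mismatch(struct kvec *resv)
    {
            svc_putnl(resv, 1);     /* reply_stat: MSG_DENIED (REJECT) */
            svc_putnl(resv, 0);     /* reject_stat: RPC_MISMATCH */
            svc_putnl(resv, 2);     /* lowest RPC version supported */
            svc_putnl(resv, 2);     /* highest RPC version supported */
    }

    static void sketch_put_null_verifier(struct kvec *resv)
    {
            svc_putnl(resv, RPC_AUTH_NULL); /* verifier flavor */
            svc_putnl(resv, 0);             /* zero-length verifier body */
    }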