ref 134 arch/arc/include/asm/entry.h mov r12, sp ; save SP as ref to pt_regs
ref 63 arch/arm/kernel/fiq.c static int fiq_def_op(void *ref, int relinquish)
ref 212 arch/mips/ath79/clock.c static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
ref 218 arch/mips/ath79/clock.c t = ref;
ref 223 arch/mips/ath79/clock.c t = ref;
ref 156 arch/powerpc/include/asm/pmac_pfunc.h struct kref ref;
ref 989 arch/powerpc/kvm/book3s_64_mmu_radix.c int ref = 0;
ref 1002 arch/powerpc/kvm/book3s_64_mmu_radix.c ref = 1;
ref 1004 arch/powerpc/kvm/book3s_64_mmu_radix.c return ref;
ref 1014 arch/powerpc/kvm/book3s_64_mmu_radix.c int ref = 0;
ref 1018 arch/powerpc/kvm/book3s_64_mmu_radix.c ref = 1;
ref 1019 arch/powerpc/kvm/book3s_64_mmu_radix.c return ref;
ref 622 arch/powerpc/kvm/book3s_hv_nested.c long ref;
ref 634 arch/powerpc/kvm/book3s_hv_nested.c ref = gp->refcnt;
ref 636 arch/powerpc/kvm/book3s_hv_nested.c if (ref == 0)
ref 737 arch/powerpc/kvm/book3s_hv_nested.c long ref;
ref 740 arch/powerpc/kvm/book3s_hv_nested.c ref = --gp->refcnt;
ref 742 arch/powerpc/kvm/book3s_hv_nested.c if (ref == 0)
ref 46 arch/powerpc/kvm/e500.h struct tlbe_ref ref;
ref 189 arch/powerpc/kvm/e500_mmu_host.c struct tlbe_ref *ref = &vcpu_e500->gtlb_priv[tlbsel][esel].ref;
ref 192 arch/powerpc/kvm/e500_mmu_host.c if (!(ref->flags & E500_TLB_VALID)) {
ref 193 arch/powerpc/kvm/e500_mmu_host.c WARN(ref->flags & (E500_TLB_BITMAP | E500_TLB_TLB0),
ref 194 arch/powerpc/kvm/e500_mmu_host.c "%s: flags %x\n", __func__, ref->flags);
ref 198 arch/powerpc/kvm/e500_mmu_host.c if (tlbsel == 1 && ref->flags & E500_TLB_BITMAP) {
ref 216 arch/powerpc/kvm/e500_mmu_host.c ref->flags &= ~(E500_TLB_BITMAP | E500_TLB_VALID);
ref 220 arch/powerpc/kvm/e500_mmu_host.c if (tlbsel == 1 && ref->flags & E500_TLB_TLB0) {
ref 226 arch/powerpc/kvm/e500_mmu_host.c ref->flags &= ~(E500_TLB_TLB0 | E500_TLB_VALID);
ref 233 arch/powerpc/kvm/e500_mmu_host.c if (ref->flags & E500_TLB_VALID)
ref 237 arch/powerpc/kvm/e500_mmu_host.c ref->flags = 0;
ref 245 arch/powerpc/kvm/e500_mmu_host.c static inline void kvmppc_e500_ref_setup(struct tlbe_ref *ref,
ref 249 arch/powerpc/kvm/e500_mmu_host.c ref->pfn = pfn;
ref 250 arch/powerpc/kvm/e500_mmu_host.c ref->flags = E500_TLB_VALID;
ref 253 arch/powerpc/kvm/e500_mmu_host.c ref->flags |= (gtlbe->mas2 & MAS2_ATTRIB_MASK) | wimg;
ref 262 arch/powerpc/kvm/e500_mmu_host.c static inline void kvmppc_e500_ref_release(struct tlbe_ref *ref)
ref 264 arch/powerpc/kvm/e500_mmu_host.c if (ref->flags & E500_TLB_VALID) {
ref 266 arch/powerpc/kvm/e500_mmu_host.c trace_kvm_booke206_ref_release(ref->pfn, ref->flags);
ref 267 arch/powerpc/kvm/e500_mmu_host.c ref->flags = 0;
ref 288 arch/powerpc/kvm/e500_mmu_host.c struct tlbe_ref *ref =
ref 289 arch/powerpc/kvm/e500_mmu_host.c &vcpu_e500->gtlb_priv[tlbsel][i].ref;
ref 290 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_ref_release(ref);
ref 307 arch/powerpc/kvm/e500_mmu_host.c int tsize, struct tlbe_ref *ref, u64 gvaddr,
ref 310 arch/powerpc/kvm/e500_mmu_host.c kvm_pfn_t pfn = ref->pfn;
ref 313 arch/powerpc/kvm/e500_mmu_host.c BUG_ON(!(ref->flags & E500_TLB_VALID));
ref 317 arch/powerpc/kvm/e500_mmu_host.c stlbe->mas2 = (gvaddr & MAS2_EPN) | (ref->flags & E500_TLB_MAS2_ATTR);
ref 325 arch/powerpc/kvm/e500_mmu_host.c struct tlbe_ref *ref)
ref 493 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_ref_setup(ref, gtlbe, pfn, wimg);
ref 496 arch/powerpc/kvm/e500_mmu_host.c ref, gvaddr, stlbe);
ref 515 arch/powerpc/kvm/e500_mmu_host.c struct tlbe_ref *ref;
ref 521 arch/powerpc/kvm/e500_mmu_host.c ref = &vcpu_e500->gtlb_priv[0][esel].ref;
ref 525 arch/powerpc/kvm/e500_mmu_host.c gtlbe, 0, stlbe, ref);
ref 535 arch/powerpc/kvm/e500_mmu_host.c struct tlbe_ref *ref,
ref 548 arch/powerpc/kvm/e500_mmu_host.c vcpu_e500->gtlb_priv[1][esel].ref.flags |= E500_TLB_BITMAP;
ref 551 arch/powerpc/kvm/e500_mmu_host.c WARN_ON(!(ref->flags & E500_TLB_VALID));
ref 563 arch/powerpc/kvm/e500_mmu_host.c struct tlbe_ref *ref = &vcpu_e500->gtlb_priv[1][esel].ref;
ref 568 arch/powerpc/kvm/e500_mmu_host.c ref);
ref 574 arch/powerpc/kvm/e500_mmu_host.c vcpu_e500->gtlb_priv[1][esel].ref.flags |= E500_TLB_TLB0;
ref 580 arch/powerpc/kvm/e500_mmu_host.c sesel = kvmppc_e500_tlb1_map_tlb1(vcpu_e500, ref, esel);
ref 602 arch/powerpc/kvm/e500_mmu_host.c if (!(priv->ref.flags & E500_TLB_VALID)) {
ref 606 arch/powerpc/kvm/e500_mmu_host.c &priv->ref, eaddr, &stlbe);
ref 158 arch/powerpc/oprofile/cell/spu_task_sync.c struct kref *ref;
ref 169 arch/powerpc/oprofile/cell/spu_task_sync.c ref = spu_get_profile_private_kref(the_spu->ctx);
ref 170 arch/powerpc/oprofile/cell/spu_task_sync.c if (ref) {
ref 171 arch/powerpc/oprofile/cell/spu_task_sync.c spu_info[spu_num] = container_of(ref, struct cached_info, cache_ref);
ref 337 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 388 arch/powerpc/perf/imc-pmu.c ref = get_nest_pmu_ref(cpu);
ref 389 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 392 arch/powerpc/perf/imc-pmu.c ref->refc = 0;
ref 438 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 451 arch/powerpc/perf/imc-pmu.c ref = get_nest_pmu_ref(event->cpu);
ref 452 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 456 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 457 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 468 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 471 arch/powerpc/perf/imc-pmu.c ref->refc--;
ref 472 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 476 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 480 arch/powerpc/perf/imc-pmu.c } else if (ref->refc < 0) {
ref 482 arch/powerpc/perf/imc-pmu.c ref->refc = 0;
ref 484 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 493 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 546 arch/powerpc/perf/imc-pmu.c ref = get_nest_pmu_ref(event->cpu);
ref 547 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 550 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 551 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 555 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 561 arch/powerpc/perf/imc-pmu.c ++ref->refc;
ref 562 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 655 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 696 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 697 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 700 arch/powerpc/perf/imc-pmu.c ref->refc = 0;
ref 716 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 729 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 730 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 733 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 734 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 745 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 748 arch/powerpc/perf/imc-pmu.c ref->refc--;
ref 749 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 753 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 757 arch/powerpc/perf/imc-pmu.c } else if (ref->refc < 0) {
ref 759 arch/powerpc/perf/imc-pmu.c ref->refc = 0;
ref 761 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 770 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 798 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 799 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 808 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 809 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 813 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 819 arch/powerpc/perf/imc-pmu.c ++ref->refc;
ref 820 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1022 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 1041 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 1042 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 1045 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 1046 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 1049 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1055 arch/powerpc/perf/imc-pmu.c ++ref->refc;
ref 1056 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1064 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref;
ref 1069 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 1071 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 1072 arch/powerpc/perf/imc-pmu.c ref->refc--;
ref 1073 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 1076 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1081 arch/powerpc/perf/imc-pmu.c } else if (ref->refc < 0) {
ref 1082 arch/powerpc/perf/imc-pmu.c ref->refc = 0;
ref 1084 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1222 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref = NULL;
ref 1230 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 1231 arch/powerpc/perf/imc-pmu.c if (!ref) {
ref 1234 arch/powerpc/perf/imc-pmu.c ref = &trace_imc_refc[core_id];
ref 1235 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 1239 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 1240 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 1243 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1249 arch/powerpc/perf/imc-pmu.c ++ref->refc;
ref 1250 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1275 arch/powerpc/perf/imc-pmu.c struct imc_pmu_ref *ref = NULL;
ref 1278 arch/powerpc/perf/imc-pmu.c ref = &core_imc_refc[core_id];
ref 1279 arch/powerpc/perf/imc-pmu.c if (!ref) {
ref 1282 arch/powerpc/perf/imc-pmu.c ref = &trace_imc_refc[core_id];
ref 1283 arch/powerpc/perf/imc-pmu.c if (!ref)
ref 1287 arch/powerpc/perf/imc-pmu.c mutex_lock(&ref->lock);
ref 1288 arch/powerpc/perf/imc-pmu.c ref->refc--;
ref 1289 arch/powerpc/perf/imc-pmu.c if (ref->refc == 0) {
ref 1292 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 1296 arch/powerpc/perf/imc-pmu.c } else if (ref->refc < 0) {
ref 1297 arch/powerpc/perf/imc-pmu.c ref->refc = 0;
ref 1299 arch/powerpc/perf/imc-pmu.c mutex_unlock(&ref->lock);
ref 368 arch/powerpc/platforms/cell/spufs/sched.c static struct spu *ctx_location(struct spu *ref, int offset, int node)
ref 374 arch/powerpc/platforms/cell/spufs/sched.c list_for_each_entry(spu, ref->aff_list.prev, aff_list) {
ref 382 arch/powerpc/platforms/cell/spufs/sched.c list_for_each_entry_reverse(spu, ref->aff_list.next, aff_list) {
ref 544 arch/powerpc/platforms/powermac/pfunc_core.c struct kref ref;
ref 553 arch/powerpc/platforms/powermac/pfunc_core.c struct pmf_device *dev = container_of(kref, struct pmf_device, ref);
ref 559 arch/powerpc/platforms/powermac/pfunc_core.c kref_put(&dev->ref, pmf_release_device);
ref 564 arch/powerpc/platforms/powermac/pfunc_core.c kref_get(&dev->ref);
ref 650 arch/powerpc/platforms/powermac/pfunc_core.c kref_init(&func->ref);
ref 728 arch/powerpc/platforms/powermac/pfunc_core.c kref_init(&dev->ref);
ref 753 arch/powerpc/platforms/powermac/pfunc_core.c kref_get(&func->ref);
ref 761 arch/powerpc/platforms/powermac/pfunc_core.c container_of(kref, struct pmf_function, ref);
ref 768 arch/powerpc/platforms/powermac/pfunc_core.c kref_put(&func->ref, pmf_release_function);
ref 1696 arch/x86/events/core.c EVENT_ATTR(ref-cycles, REF_CPU_CYCLES );
ref 2665 arch/x86/events/intel/core.c if (!atomic_read(&era->ref) || era->config == reg->config) {
ref 2695 arch/x86/events/intel/core.c atomic_inc(&era->ref);
ref 2734 arch/x86/events/intel/core.c atomic_dec(&era->ref);
ref 161 arch/x86/events/intel/uncore.c if (!atomic_read(&er->ref) ||
ref 163 arch/x86/events/intel/uncore.c atomic_inc(&er->ref);
ref 196 arch/x86/events/intel/uncore.c atomic_dec(&er->ref);
ref 110 arch/x86/events/intel/uncore.h atomic_t ref;
ref 563 arch/x86/events/intel/uncore_nhmex.c if (!atomic_read(&er->ref) || er->config == config) {
ref 564 arch/x86/events/intel/uncore_nhmex.c atomic_inc(&er->ref);
ref 590 arch/x86/events/intel/uncore_nhmex.c if (__BITS_VALUE(atomic_read(&er->ref), idx, 8)) {
ref 597 arch/x86/events/intel/uncore_nhmex.c if (!atomic_read(&er->ref) || !((er->config ^ config) & mask)) {
ref 598 arch/x86/events/intel/uncore_nhmex.c atomic_add(1 << (idx * 8), &er->ref);
ref 620 arch/x86/events/intel/uncore_nhmex.c atomic_dec(&er->ref);
ref 626 arch/x86/events/intel/uncore_nhmex.c atomic_sub(1 << (idx * 8), &er->ref);
ref 1004 arch/x86/events/intel/uncore_nhmex.c if (!atomic_read(&er->ref) || er->config == reg1->config) {
ref 1005 arch/x86/events/intel/uncore_nhmex.c atomic_inc(&er->ref);
ref 1015 arch/x86/events/intel/uncore_nhmex.c if (!__BITS_VALUE(atomic_read(&er->ref), idx - 2, 8) ||
ref 1017 arch/x86/events/intel/uncore_nhmex.c atomic_add(1 << ((idx - 2) * 8), &er->ref);
ref 1023 arch/x86/events/intel/uncore_nhmex.c if (!atomic_read(&er->ref) ||
ref 1027 arch/x86/events/intel/uncore_nhmex.c atomic_inc(&er->ref);
ref 1079 arch/x86/events/intel/uncore_nhmex.c atomic_sub(1 << ((idx - 2) * 8), &er->ref);
ref 1081 arch/x86/events/intel/uncore_nhmex.c atomic_dec(&er->ref);
ref 852 arch/x86/events/intel/uncore_snbep.c atomic_sub(1 << (i * 6), &er->ref);
ref 878 arch/x86/events/intel/uncore_snbep.c if (!__BITS_VALUE(atomic_read(&er->ref), i, 6) ||
ref 880 arch/x86/events/intel/uncore_snbep.c atomic_add(1 << (i * 6), &er->ref);
ref 899 arch/x86/events/intel/uncore_snbep.c atomic_sub(1 << (i * 6), &er->ref);
ref 1005 arch/x86/events/intel/uncore_snbep.c if (!__BITS_VALUE(atomic_read(&er->ref), idx, 8) ||
ref 1007 arch/x86/events/intel/uncore_snbep.c atomic_add(1 << (idx * 8), &er->ref);
ref 1039 arch/x86/events/intel/uncore_snbep.c atomic_sub(1 << (reg1->idx * 8), &er->ref);
ref 138 arch/x86/events/perf_event.h atomic_t ref; /* reference count */
ref 140 arch/x86/kernel/tsc_sync.c struct tsc_adjust *ref, *cur = this_cpu_ptr(&tsc_adjust);
ref 176 arch/x86/kernel/tsc_sync.c ref = per_cpu_ptr(&tsc_adjust, refcpu);
ref 181 arch/x86/kernel/tsc_sync.c if (bootval != ref->bootval)
ref 190 arch/x86/kernel/tsc_sync.c if (bootval != ref->adjusted) {
ref 191 arch/x86/kernel/tsc_sync.c cur->adjusted = ref->adjusted;
ref 192 arch/x86/kernel/tsc_sync.c wrmsrl(MSR_IA32_TSC_ADJUST, ref->adjusted);
ref 324 block/bfq-cgroup.c bfqg->ref++;
ref 329 block/bfq-cgroup.c bfqg->ref--;
ref 331 block/bfq-cgroup.c if (bfqg->ref == 0)
ref 632 block/bfq-cgroup.c bfqq->ref++;
ref 787 block/bfq-iosched.c bfqq->ref++;
ref 1063 block/bfq-iosched.c return bfqq->ref - bfqq->allocated - bfqq->entity.on_st -
ref 2527 block/bfq-iosched.c new_bfqq->ref += process_refs;
ref 4831 block/bfq-iosched.c bfqq, bfqq->ref);
ref 4833 block/bfq-iosched.c bfqq->ref--;
ref 4834 block/bfq-iosched.c if (bfqq->ref)
ref 4929 block/bfq-iosched.c bfq_log_bfqq(bfqd, bfqq, "exit_bfqq: %p, %d", bfqq, bfqq->ref);
ref 5054 block/bfq-iosched.c bfqq->ref = 0;
ref 5170 block/bfq-iosched.c bfqq->ref++; /*
ref 5178 block/bfq-iosched.c bfqq, bfqq->ref);
ref 5183 block/bfq-iosched.c bfqq->ref++; /* get a process reference to this queue */
ref 5184 block/bfq-iosched.c bfq_log_bfqq(bfqd, bfqq, "get_queue, at end: %p, %d", bfqq, bfqq->ref);
ref 5417 block/bfq-iosched.c new_bfqq->ref++;
ref 6157 block/bfq-iosched.c bfqq->ref++;
ref 6159 block/bfq-iosched.c rq, bfqq, bfqq->ref);
ref 6292 block/bfq-iosched.c bfqq, bfqq->ref);
ref 6449 block/bfq-iosched.c bfqd->oom_bfqq.ref++;
ref 230 block/bfq-iosched.h int ref;
ref 898 block/bfq-iosched.h int ref;
ref 536 block/bfq-wf2q.c bfqq->ref++;
ref 538 block/bfq-wf2q.c bfqq, bfqq->ref);
ref 1641 block/bfq-wf2q.c int ref = in_serv_bfqq->ref;
ref 1643 block/bfq-wf2q.c if (ref == 1)
ref 113 block/blk-cgroup.c static void blkg_release(struct percpu_ref *ref)
ref 115 block/blk-cgroup.c struct blkcg_gq *blkg = container_of(ref, struct blkcg_gq, refcnt);
ref 122 block/blk-core.c refcount_set(&rq->ref, 1);
ref 452 block/blk-core.c static void blk_queue_usage_counter_release(struct percpu_ref *ref)
ref 455 block/blk-core.c container_of(ref, struct request_queue, q_usage_counter);
ref 219 block/blk-flush.c if (!refcount_dec_and_test(&flush_rq->ref)) {
ref 352 block/blk-mq.c refcount_set(&rq->ref, 1);
ref 535 block/blk-mq.c if (refcount_dec_and_test(&rq->ref))
ref 910 block/blk-mq.c if (!refcount_inc_not_zero(&rq->ref))
ref 924 block/blk-mq.c else if (refcount_dec_and_test(&rq->ref))
ref 259 block/partition-generic.c void __delete_partition(struct percpu_ref *ref)
ref 261 block/partition-generic.c struct hd_struct *part = container_of(ref, struct hd_struct, ref);
ref 52 crypto/asymmetric_keys/asymmetric_type.c key_ref_t ref;
ref 85 crypto/asymmetric_keys/asymmetric_type.c ref = keyring_search(make_key_ref(keyring, 1),
ref 87 crypto/asymmetric_keys/asymmetric_type.c if (IS_ERR(ref))
ref 88 crypto/asymmetric_keys/asymmetric_type.c pr_debug("Request for key '%s' err %ld\n", req, PTR_ERR(ref));
ref 91 crypto/asymmetric_keys/asymmetric_type.c if (IS_ERR(ref)) {
ref 92 crypto/asymmetric_keys/asymmetric_type.c switch (PTR_ERR(ref)) {
ref 99 crypto/asymmetric_keys/asymmetric_type.c return ERR_CAST(ref);
ref 103 crypto/asymmetric_keys/asymmetric_type.c key = key_ref_to_ptr(ref);
ref 63 drivers/acpi/pptt.c u32 *ref;
ref 68 drivers/acpi/pptt.c ref = ACPI_ADD_PTR(u32, node, sizeof(struct acpi_pptt_processor));
ref 69 drivers/acpi/pptt.c ref += resource;
ref 71 drivers/acpi/pptt.c return fetch_pptt_subtable(table_hdr, *ref);
ref 1398 drivers/android/binder.c struct binder_ref *ref;
ref 1401 drivers/android/binder.c ref = rb_entry(n, struct binder_ref, rb_node_desc);
ref 1403 drivers/android/binder.c if (desc < ref->data.desc) {
ref 1405 drivers/android/binder.c } else if (desc > ref->data.desc) {
ref 1407 drivers/android/binder.c } else if (need_strong_ref && !ref->data.strong) {
ref 1411 drivers/android/binder.c return ref;
ref 1443 drivers/android/binder.c struct binder_ref *ref;
ref 1448 drivers/android/binder.c ref = rb_entry(parent, struct binder_ref, rb_node_node);
ref 1450 drivers/android/binder.c if (node < ref->node)
ref 1452 drivers/android/binder.c else if (node > ref->node)
ref 1455 drivers/android/binder.c return ref;
ref 1469 drivers/android/binder.c ref = rb_entry(n, struct binder_ref, rb_node_desc);
ref 1470 drivers/android/binder.c if (ref->data.desc > new_ref->data.desc)
ref 1472 drivers/android/binder.c new_ref->data.desc = ref->data.desc + 1;
ref 1478 drivers/android/binder.c ref = rb_entry(parent, struct binder_ref, rb_node_desc);
ref 1480 drivers/android/binder.c if (new_ref->data.desc < ref->data.desc)
ref 1482 drivers/android/binder.c else if (new_ref->data.desc > ref->data.desc)
ref 1501 drivers/android/binder.c static void binder_cleanup_ref_olocked(struct binder_ref *ref)
ref 1507 drivers/android/binder.c ref->proc->pid, ref->data.debug_id, ref->data.desc,
ref 1508 drivers/android/binder.c ref->node->debug_id);
ref 1510 drivers/android/binder.c rb_erase(&ref->rb_node_desc, &ref->proc->refs_by_desc);
ref 1511 drivers/android/binder.c rb_erase(&ref->rb_node_node, &ref->proc->refs_by_node);
ref 1513 drivers/android/binder.c binder_node_inner_lock(ref->node);
ref 1514 drivers/android/binder.c if (ref->data.strong)
ref 1515 drivers/android/binder.c binder_dec_node_nilocked(ref->node, 1, 1);
ref 1517 drivers/android/binder.c hlist_del(&ref->node_entry);
ref 1518 drivers/android/binder.c delete_node = binder_dec_node_nilocked(ref->node, 0, 1);
ref 1519 drivers/android/binder.c binder_node_inner_unlock(ref->node);
ref 1529 drivers/android/binder.c ref->node = NULL;
ref 1532 drivers/android/binder.c if (ref->death) {
ref 1535 drivers/android/binder.c ref->proc->pid, ref->data.debug_id,
ref 1536 drivers/android/binder.c ref->data.desc);
ref 1537 drivers/android/binder.c binder_dequeue_work(ref->proc, &ref->death->work);
ref 1553 drivers/android/binder.c static int binder_inc_ref_olocked(struct binder_ref *ref, int strong,
ref 1559 drivers/android/binder.c if (ref->data.strong == 0) {
ref 1560 drivers/android/binder.c ret = binder_inc_node(ref->node, 1, 1, target_list);
ref 1564 drivers/android/binder.c ref->data.strong++;
ref 1566 drivers/android/binder.c if (ref->data.weak == 0) {
ref 1567 drivers/android/binder.c ret = binder_inc_node(ref->node, 0, 1, target_list);
ref 1571 drivers/android/binder.c ref->data.weak++;
ref 1585 drivers/android/binder.c static bool binder_dec_ref_olocked(struct binder_ref *ref, int strong)
ref 1588 drivers/android/binder.c if (ref->data.strong == 0) {
ref 1590 drivers/android/binder.c ref->proc->pid, ref->data.debug_id,
ref 1591 drivers/android/binder.c ref->data.desc, ref->data.strong,
ref 1592 drivers/android/binder.c ref->data.weak);
ref 1595 drivers/android/binder.c ref->data.strong--;
ref 1596 drivers/android/binder.c if (ref->data.strong == 0)
ref 1597 drivers/android/binder.c binder_dec_node(ref->node, strong, 1);
ref 1599 drivers/android/binder.c if (ref->data.weak == 0) {
ref 1601 drivers/android/binder.c ref->proc->pid, ref->data.debug_id,
ref 1602 drivers/android/binder.c ref->data.desc, ref->data.strong,
ref 1603 drivers/android/binder.c ref->data.weak);
ref 1606 drivers/android/binder.c ref->data.weak--;
ref 1608 drivers/android/binder.c if (ref->data.strong == 0 && ref->data.weak == 0) {
ref 1609 drivers/android/binder.c binder_cleanup_ref_olocked(ref);
ref 1632 drivers/android/binder.c struct binder_ref *ref;
ref 1635 drivers/android/binder.c ref = binder_get_ref_olocked(proc, desc, need_strong_ref);
ref 1636 drivers/android/binder.c if (!ref)
ref 1638 drivers/android/binder.c node = ref->node;
ref 1645 drivers/android/binder.c *rdata = ref->data;
ref 1662 drivers/android/binder.c static void binder_free_ref(struct binder_ref *ref)
ref 1664 drivers/android/binder.c if (ref->node)
ref 1665 drivers/android/binder.c binder_free_node(ref->node);
ref 1666 drivers/android/binder.c kfree(ref->death);
ref 1667 drivers/android/binder.c kfree(ref);
ref 1688 drivers/android/binder.c struct binder_ref *ref;
ref 1692 drivers/android/binder.c ref = binder_get_ref_olocked(proc, desc, strong);
ref 1693 drivers/android/binder.c if (!ref) {
ref 1698 drivers/android/binder.c ret = binder_inc_ref_olocked(ref, strong, NULL);
ref 1700 drivers/android/binder.c delete_ref = binder_dec_ref_olocked(ref, strong);
ref 1703 drivers/android/binder.c *rdata = ref->data;
ref 1707 drivers/android/binder.c binder_free_ref(ref);
ref 1752 drivers/android/binder.c struct binder_ref *ref;
ref 1757 drivers/android/binder.c ref = binder_get_ref_for_node_olocked(proc, node, NULL);
ref 1758 drivers/android/binder.c if (!ref) {
ref 1760 drivers/android/binder.c new_ref = kzalloc(sizeof(*ref), GFP_KERNEL);
ref 1764 drivers/android/binder.c ref = binder_get_ref_for_node_olocked(proc, node, new_ref);
ref 1766 drivers/android/binder.c ret = binder_inc_ref_olocked(ref, strong, target_list);
ref 1767 drivers/android/binder.c *rdata = ref->data;
ref 1769 drivers/android/binder.c if (new_ref && ref != new_ref)
ref 2937 drivers/android/binder.c struct binder_ref *ref;
ref 2947 drivers/android/binder.c ref = binder_get_ref_olocked(proc, tr->target.handle,
ref 2949 drivers/android/binder.c if (ref) {
ref 2951 drivers/android/binder.c ref->node, &target_proc,
ref 3856 drivers/android/binder.c struct binder_ref *ref;
ref 3886 drivers/android/binder.c ref = binder_get_ref_olocked(proc, target, false);
ref 3887 drivers/android/binder.c if (ref == NULL) {
ref 3905 drivers/android/binder.c (u64)cookie, ref->data.debug_id,
ref 3906 drivers/android/binder.c ref->data.desc, ref->data.strong,
ref 3907 drivers/android/binder.c ref->data.weak, ref->node->debug_id);
ref 3909 drivers/android/binder.c binder_node_lock(ref->node);
ref 3911 drivers/android/binder.c if (ref->death) {
ref 3914 drivers/android/binder.c binder_node_unlock(ref->node);
ref 3922 drivers/android/binder.c ref->death = death;
ref 3923 drivers/android/binder.c if (ref->node->proc == NULL) {
ref 3924 drivers/android/binder.c ref->death->work.type = BINDER_WORK_DEAD_BINDER;
ref 3928 drivers/android/binder.c &ref->death->work, &proc->todo);
ref 3933 drivers/android/binder.c if (ref->death == NULL) {
ref 3936 drivers/android/binder.c binder_node_unlock(ref->node);
ref 3940 drivers/android/binder.c death = ref->death;
ref 3946 drivers/android/binder.c binder_node_unlock(ref->node);
ref 3950 drivers/android/binder.c ref->death = NULL;
ref 3973 drivers/android/binder.c binder_node_unlock(ref->node);
ref 5233 drivers/android/binder.c refcount_inc(&binder_dev->ref);
ref 5345 drivers/android/binder.c struct binder_ref *ref;
ref 5375 drivers/android/binder.c hlist_for_each_entry(ref, &node->refs, node_entry) {
ref 5383 drivers/android/binder.c binder_inner_proc_lock(ref->proc);
ref 5384 drivers/android/binder.c if (!ref->death) {
ref 5385 drivers/android/binder.c binder_inner_proc_unlock(ref->proc);
ref 5391 drivers/android/binder.c BUG_ON(!list_empty(&ref->death->work.entry));
ref 5392 drivers/android/binder.c ref->death->work.type = BINDER_WORK_DEAD_BINDER;
ref 5393 drivers/android/binder.c binder_enqueue_work_ilocked(&ref->death->work,
ref 5394 drivers/android/binder.c &ref->proc->todo);
ref 5395 drivers/android/binder.c binder_wakeup_proc_ilocked(ref->proc);
ref 5396 drivers/android/binder.c binder_inner_proc_unlock(ref->proc);
ref 5429 drivers/android/binder.c if (refcount_dec_and_test(&device->ref)) {
ref 5477 drivers/android/binder.c struct binder_ref *ref;
ref 5479 drivers/android/binder.c ref = rb_entry(n, struct binder_ref, rb_node_desc);
ref 5481 drivers/android/binder.c binder_cleanup_ref_olocked(ref);
ref 5483 drivers/android/binder.c binder_free_ref(ref);
ref 5668 drivers/android/binder.c struct binder_ref *ref;
ref 5673 drivers/android/binder.c hlist_for_each_entry(ref, &node->refs, node_entry)
ref 5683 drivers/android/binder.c hlist_for_each_entry(ref, &node->refs, node_entry)
ref 5684 drivers/android/binder.c seq_printf(m, " %d", ref->proc->pid);
ref 5695 drivers/android/binder.c struct binder_ref *ref)
ref 5697 drivers/android/binder.c binder_node_lock(ref->node);
ref 5699 drivers/android/binder.c ref->data.debug_id, ref->data.desc,
ref 5700 drivers/android/binder.c ref->node->proc ? "" : "dead ",
ref 5701 drivers/android/binder.c ref->node->debug_id, ref->data.strong,
ref 5702 drivers/android/binder.c ref->data.weak, ref->death);
ref 5703 drivers/android/binder.c binder_node_unlock(ref->node);
ref 5907 drivers/android/binder.c struct binder_ref *ref = rb_entry(n, struct binder_ref,
ref 5910 drivers/android/binder.c strong += ref->data.strong;
ref 5911 drivers/android/binder.c weak += ref->data.weak;
ref 6090 drivers/android/binder.c refcount_set(&binder_device->ref, 1);
ref 37 drivers/android/binder_internal.h refcount_t ref;
ref 157 drivers/android/binderfs.c refcount_set(&device->ref, 1);
ref 261 drivers/android/binderfs.c if (refcount_dec_and_test(&device->ref)) {
ref 451 drivers/android/binderfs.c refcount_set(&device->ref, 1);
ref 1863 drivers/base/core.c unsigned int ref;
ref 1918 drivers/base/core.c ref = kref_read(&glue_dir->kref);
ref 1919 drivers/base/core.c if (!kobject_has_children(glue_dir) && !--ref)
ref 60 drivers/base/firmware_loader/firmware.h struct kref ref;
ref 80 drivers/base/firmware_loader/main.c static inline struct fw_priv *to_fw_priv(struct kref *ref)
ref 82 drivers/base/firmware_loader/main.c return container_of(ref, struct fw_priv, ref);
ref 184 drivers/base/firmware_loader/main.c kref_init(&fw_priv->ref);
ref 221 drivers/base/firmware_loader/main.c kref_get(&tmp->ref);
ref 242 drivers/base/firmware_loader/main.c static void __free_fw_priv(struct kref *ref)
ref 245 drivers/base/firmware_loader/main.c struct fw_priv *fw_priv = to_fw_priv(ref);
ref 266 drivers/base/firmware_loader/main.c if (!kref_put(&fw_priv->ref, __free_fw_priv))
ref 671 drivers/base/firmware_loader/main.c kref_get(&fw_priv->ref);
ref 573 drivers/base/swnode.c const struct software_node_reference *ref;
ref 581 drivers/base/swnode.c for (ref = swnode->node->references; ref->name; ref++)
ref 582 drivers/base/swnode.c if (!strcmp(ref->name, propname))
ref 585 drivers/base/swnode.c if (!ref->name || index > (ref->nrefs - 1))
ref 588 drivers/base/swnode.c refnode = software_node_fwnode(ref->refs[index].node);
ref 607 drivers/base/swnode.c args->args[i] = ref->refs[index].args[i];
ref 168 drivers/block/aoe/aoe.h ulong ref;
ref 128 drivers/block/aoe/aoeblk.c seq_printf(s, "ref: %ld\n", d->ref);
ref 143 drivers/block/aoe/aoedev.c d->ref--;
ref 342 drivers/block/aoe/aoedev.c || d->ref)
ref 457 drivers/block/aoe/aoedev.c d->ref++;
ref 482 drivers/block/aoe/aoedev.c d->ref = 1;
ref 301 drivers/block/ataflop.c int ref;
ref 1615 drivers/block/ataflop.c if (floppy->ref != 1 && floppy->ref != -1)
ref 1719 drivers/block/ataflop.c if (floppy->ref != 1 && floppy->ref != -1)
ref 1895 drivers/block/ataflop.c if (p->ref && p->type != type)
ref 1898 drivers/block/ataflop.c if (p->ref == -1 || (p->ref && mode & FMODE_EXCL))
ref 1902 drivers/block/ataflop.c p->ref = -1;
ref 1904 drivers/block/ataflop.c p->ref++;
ref 1915 drivers/block/ataflop.c if (p->ref < 0)
ref 1916 drivers/block/ataflop.c p->ref = 0;
ref 1918 drivers/block/ataflop.c p->ref--;
ref 1941 drivers/block/ataflop.c if (p->ref < 0)
ref 1942 drivers/block/ataflop.c p->ref = 0;
ref 1943 drivers/block/ataflop.c else if (!p->ref--) {
ref 1945 drivers/block/ataflop.c p->ref = 0;
ref 496 drivers/block/loop.c if (!atomic_dec_and_test(&cmd->ref))
ref 559 drivers/block/loop.c atomic_set(&cmd->ref, 2);
ref 70 drivers/block/loop.h atomic_t ref; /* only for aio */
ref 902 drivers/block/xen-blkback/blkback.c persistent_gnt->gnt = map[new_map_idx].ref;
ref 588 drivers/block/xen-blkfront.c int n, ref;
ref 626 drivers/block/xen-blkfront.c ref = gnt_list_entry->gref;
ref 659 drivers/block/xen-blkfront.c .gref = ref,
ref 665 drivers/block/xen-blkfront.c .gref = ref,
ref 78 drivers/char/hw_random/core.c struct hwrng *rng = container_of(kref, struct hwrng, ref);
ref 109 drivers/char/hw_random/core.c kref_put(&current_rng->ref, cleanup_rng);
ref 123 drivers/char/hw_random/core.c kref_get(&rng->ref);
ref 137 drivers/char/hw_random/core.c kref_put(&rng->ref, cleanup_rng);
ref 143 drivers/char/hw_random/core.c if (kref_get_unless_zero(&rng->ref))
ref 154 drivers/char/hw_random/core.c kref_init(&rng->ref);
ref 731 drivers/char/ipmi/ipmi_msghandler.c static void intf_free(struct kref *ref)
ref 733 drivers/char/ipmi/ipmi_msghandler.c struct ipmi_smi *intf = container_of(ref, struct ipmi_smi, refcount);
ref 1283 drivers/char/ipmi/ipmi_msghandler.c static void free_user(struct kref *ref)
ref 1285 drivers/char/ipmi/ipmi_msghandler.c struct ipmi_user *user = container_of(ref, struct ipmi_user, refcount);
ref 2929 drivers/char/ipmi/ipmi_msghandler.c cleanup_bmc_device(struct kref *ref)
ref 2931 drivers/char/ipmi/ipmi_msghandler.c struct bmc_device *bmc = container_of(ref, struct bmc_device, usecount);
ref 91 drivers/clk/clk.c struct kref ref;
ref 3451 drivers/clk/clk.c kref_init(&core->ref);
ref 3559 drivers/clk/clk.c kref_get(&core->ref);
ref 3820 drivers/clk/clk.c static void __clk_release(struct kref *ref)
ref 3822 drivers/clk/clk.c struct clk_core *core = container_of(ref, struct clk_core, ref);
ref 3944 drivers/clk/clk.c kref_put(&clk->core->ref, __clk_release);
ref 4110 drivers/clk/clk.c kref_put(&clk->core->ref, __clk_release);
ref 79 drivers/clk/imx/clk-sccg-pll.c uint64_t ref;
ref 195 drivers/clk/imx/clk-sccg-pll.c uint64_t ref)
ref 200 drivers/clk/imx/clk-sccg-pll.c if (ref < PLL_STAGE1_MIN_FREQ || ref > PLL_STAGE1_MAX_FREQ)
ref 203 drivers/clk/imx/clk-sccg-pll.c temp_setup->vco1 = ref;
ref 216 drivers/clk/imx/clk-sccg-pll.c uint64_t vco1 = temp_setup->ref;
ref 239 drivers/clk/imx/clk-sccg-pll.c temp_setup->ref_div1 = temp_setup->ref;
ref 254 drivers/clk/imx/clk-sccg-pll.c uint64_t ref)
ref 259 drivers/clk/imx/clk-sccg-pll.c if (ref < PLL_REF_MIN_FREQ || ref > PLL_REF_MAX_FREQ)
ref 262 drivers/clk/imx/clk-sccg-pll.c temp_setup->ref = ref;
ref 33 drivers/clk/mxs/clk-ref.c struct clk_ref *ref = to_clk_ref(hw);
ref 35 drivers/clk/mxs/clk-ref.c writel_relaxed(1 << ((ref->idx + 1) * 8 - 1), ref->reg + CLR);
ref 42 drivers/clk/mxs/clk-ref.c struct clk_ref *ref = to_clk_ref(hw);
ref 44 drivers/clk/mxs/clk-ref.c writel_relaxed(1 << ((ref->idx + 1) * 8 - 1), ref->reg + SET);
ref 50 drivers/clk/mxs/clk-ref.c struct clk_ref *ref = to_clk_ref(hw);
ref 52 drivers/clk/mxs/clk-ref.c u8 frac = (readl_relaxed(ref->reg) >> (ref->idx * 8)) & 0x3f;
ref 86 drivers/clk/mxs/clk-ref.c struct clk_ref *ref = to_clk_ref(hw);
ref 90 drivers/clk/mxs/clk-ref.c u8 frac, shift = ref->idx * 8;
ref 103 drivers/clk/mxs/clk-ref.c val = readl_relaxed(ref->reg);
ref 106 drivers/clk/mxs/clk-ref.c writel_relaxed(val, ref->reg);
ref 124 drivers/clk/mxs/clk-ref.c struct clk_ref *ref;
ref 128 drivers/clk/mxs/clk-ref.c ref = kzalloc(sizeof(*ref), GFP_KERNEL);
ref 129 drivers/clk/mxs/clk-ref.c if (!ref)
ref 138 drivers/clk/mxs/clk-ref.c ref->reg = reg;
ref 139 drivers/clk/mxs/clk-ref.c ref->idx = idx;
ref 140 drivers/clk/mxs/clk-ref.c ref->hw.init = &init;
ref 142 drivers/clk/mxs/clk-ref.c clk = clk_register(NULL, &ref->hw);
ref 144 drivers/clk/mxs/clk-ref.c kfree(ref);
ref 238 drivers/clk/versatile/clk-icst.c icst->params->ref = parent_rate;
ref 335 drivers/clk/versatile/clk-icst.c icst->params->ref = parent_rate;
ref 44 drivers/clk/versatile/clk-impd1.c .ref = 24000000, /* 24 MHz */
ref 62 drivers/clk/versatile/clk-impd1.c .ref = 24000000, /* 24 MHz */
ref 29 drivers/clk/versatile/icst.c u64 dividend = p->ref * 2 * (u64)(vco.v + 8);
ref 79 drivers/clk/versatile/icst.c fref_div = (2 * p->ref) / rd;
ref 13 drivers/clk/versatile/icst.h unsigned long ref;
ref 883 drivers/crypto/ccp/psp-dev.c static void sev_exit(struct kref *ref)
ref 885 drivers/crypto/ccp/psp-dev.c struct sev_misc_dev *misc_dev = container_of(ref, struct sev_misc_dev, refcount);
ref 432 drivers/crypto/chelsio/chtls/chtls.h static inline void chtls_sock_free(struct kref *ref)
ref 434 drivers/crypto/chelsio/chtls/chtls.h struct chtls_sock *csk = container_of(ref, struct chtls_sock,
ref 77 drivers/crypto/chelsio/chtls/chtls_cm.c static void chtls_sock_release(struct kref *ref)
ref 80 drivers/crypto/chelsio/chtls/chtls_cm.c container_of(ref, struct chtls_sock, kref);
ref 189 drivers/dma/fsl_raid.h __be32 ref;
ref 1375 drivers/dma/ppc4xx/adma.c struct ppc_dma_chan_ref *ref;
ref 1401 drivers/dma/ppc4xx/adma.c list_for_each_entry(ref, &ppc440spe_adma_chan_list, node) {
ref 1402 drivers/dma/ppc4xx/adma.c if (dma_has_cap(cap, ref->chan->device->cap_mask)) {
ref 1405 drivers/dma/ppc4xx/adma.c rank = ppc440spe_adma_estimate(ref->chan, cap, dst_lst,
ref 1409 drivers/dma/ppc4xx/adma.c best_chan = ref->chan;
ref 4013 drivers/dma/ppc4xx/adma.c struct ppc_dma_chan_ref *ref, *_ref;
ref 4168 drivers/dma/ppc4xx/adma.c ref = kmalloc(sizeof(*ref), GFP_KERNEL);
ref 4169 drivers/dma/ppc4xx/adma.c if (ref) {
ref 4170 drivers/dma/ppc4xx/adma.c ref->chan = &chan->common;
ref 4171 drivers/dma/ppc4xx/adma.c INIT_LIST_HEAD(&ref->node);
ref 4172 drivers/dma/ppc4xx/adma.c list_add_tail(&ref->node, &ppc440spe_adma_chan_list);
ref 4197 drivers/dma/ppc4xx/adma.c list_for_each_entry_safe(ref, _ref, &ppc440spe_adma_chan_list, node) {
ref 4198 drivers/dma/ppc4xx/adma.c if (chan == to_ppc440spe_adma_chan(ref->chan)) {
ref 4199 drivers/dma/ppc4xx/adma.c list_del(&ref->node);
ref 4200 drivers/dma/ppc4xx/adma.c kfree(ref);
ref 4243 drivers/dma/ppc4xx/adma.c struct ppc_dma_chan_ref *ref, *_ref;
ref 4264 drivers/dma/ppc4xx/adma.c list_for_each_entry_safe(ref, _ref, &ppc440spe_adma_chan_list,
ref 4267 drivers/dma/ppc4xx/adma.c to_ppc440spe_adma_chan(ref->chan)) {
ref 4268 drivers/dma/ppc4xx/adma.c list_del(&ref->node);
ref 4269 drivers/dma/ppc4xx/adma.c kfree(ref);
ref 47 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c static void amdgpu_bo_list_free(struct kref *ref)
ref 49 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c struct amdgpu_bo_list *list = container_of(ref, struct amdgpu_bo_list,
ref 207 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c static void amdgpu_ctx_fini(struct kref *ref)
ref 209 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c struct amdgpu_ctx *ctx = container_of(ref, struct amdgpu_ctx, refcount);
ref 284 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c static void amdgpu_ctx_do_release(struct kref *ref)
ref 290 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c ctx = container_of(ref, struct amdgpu_ctx, refcount);
ref 296 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c amdgpu_ctx_fini(ref);
ref 393 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c uint32_t ref, uint32_t mask)
ref 395 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c amdgpu_ring_emit_wreg(ring, reg0, ref);
ref 168 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h uint32_t ref, uint32_t mask);
ref 142 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c uint32_t ref, uint32_t mask)
ref 153 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c ref, mask);
ref 298 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.h uint32_t ref, uint32_t mask);
ref 413 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t addr1, uint32_t ref, uint32_t mask,
ref 428 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c amdgpu_ring_write(ring, ref);
ref 4837 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t ref, uint32_t mask)
ref 4847 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ref, mask, 0x20);
ref 4850 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c ref, mask);
ref 819 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c uint32_t addr1, uint32_t ref, uint32_t mask,
ref 834 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c amdgpu_ring_write(ring, ref);
ref 5452 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c uint32_t ref, uint32_t mask)
ref 5461 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ref, mask, 0x20);
ref 5464 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c ref, mask);
ref 717 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c uint32_t ref, uint32_t mask,
ref 733 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c amdgpu_ring_write(ring, ref); /* reference */
ref 1197 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c uint32_t ref, uint32_t mask)
ref 1199 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c amdgpu_ring_emit_wreg(ring, reg0, ref);
ref 673 drivers/gpu/drm/amd/amdkfd/kfd_priv.h struct kref ref;
ref 64 drivers/gpu/drm/amd/amdkfd/kfd_process.c static void kfd_process_ref_release(struct kref *ref);
ref 376 drivers/gpu/drm/amd/amdkfd/kfd_process.c kref_put(&p->ref, kfd_process_ref_release);
ref 481 drivers/gpu/drm/amd/amdkfd/kfd_process.c static void kfd_process_ref_release(struct kref *ref)
ref 483 drivers/gpu/drm/amd/amdkfd/kfd_process.c struct kfd_process *p = container_of(ref, struct kfd_process, ref);
ref 631 drivers/gpu/drm/amd/amdkfd/kfd_process.c kref_init(&process->ref);
ref 914 drivers/gpu/drm/amd/amdkfd/kfd_process.c kref_get(&p->ref);
ref 934 drivers/gpu/drm/amd/amdkfd/kfd_process.c kref_get(&p->ref);
ref 48 drivers/gpu/drm/drm_atomic.c container_of(kref, struct drm_crtc_commit, ref);
ref 84 drivers/gpu/drm/drm_atomic.c kref_init(&state->ref);
ref 256 drivers/gpu/drm/drm_atomic.c void __drm_atomic_state_free(struct kref *ref)
ref 258 drivers/gpu/drm/drm_atomic.c struct drm_atomic_state *state = container_of(ref, typeof(*state), ref);
ref 1944 drivers/gpu/drm/drm_atomic_helper.c kref_init(&commit->ref);
ref 968 drivers/gpu/drm/drm_atomic_uapi.c struct drm_mode_object *ref;
ref 971 drivers/gpu/drm/drm_atomic_uapi.c if (!drm_property_change_valid_get(prop, prop_value, &ref))
ref 1024 drivers/gpu/drm/drm_atomic_uapi.c drm_property_change_valid_put(prop, ref);
ref 124 drivers/gpu/drm/drm_crtc_internal.h struct drm_mode_object **ref);
ref 126 drivers/gpu/drm/drm_crtc_internal.h struct drm_mode_object *ref);
ref 97 drivers/gpu/drm/drm_dp_aux_dev.c static void release_drm_dp_aux_dev(struct kref *ref)
ref 100 drivers/gpu/drm/drm_dp_aux_dev.c container_of(ref, struct drm_dp_aux_dev, refcount);
ref 644 drivers/gpu/drm/drm_drv.c kref_init(&dev->ref);
ref 839 drivers/gpu/drm/drm_drv.c static void drm_dev_release(struct kref *ref)
ref 841 drivers/gpu/drm/drm_drv.c struct drm_device *dev = container_of(ref, struct drm_device, ref);
ref 866 drivers/gpu/drm/drm_drv.c kref_get(&dev->ref);
ref 880 drivers/gpu/drm/drm_drv.c kref_put(&dev->ref, drm_dev_release);
ref 437 drivers/gpu/drm/drm_mode_object.c struct drm_mode_object *ref;
ref 440 drivers/gpu/drm/drm_mode_object.c if (!drm_property_change_valid_get(prop, prop_value, &ref))
ref 456 drivers/gpu/drm/drm_mode_object.c drm_property_change_valid_put(prop, ref);
ref 878 drivers/gpu/drm/drm_property.c uint64_t value, struct drm_mode_object **ref)
ref 885 drivers/gpu/drm/drm_property.c *ref = NULL;
ref 912 drivers/gpu/drm/drm_property.c *ref = &blob->base;
ref 922 drivers/gpu/drm/drm_property.c *ref = __drm_mode_object_find(property->dev, NULL, value,
ref 924 drivers/gpu/drm/drm_property.c return *ref != NULL;
ref 934 drivers/gpu/drm/drm_property.c struct drm_mode_object *ref)
ref 936 drivers/gpu/drm/drm_property.c if (!ref)
ref 940 drivers/gpu/drm/drm_property.c drm_mode_object_put(ref);
ref 942 drivers/gpu/drm/drm_property.c drm_property_blob_put(obj_to_blob(ref));
ref 1445 drivers/gpu/drm/drm_vblank.c static inline bool vblank_passed(u64 seq, u64 ref)
ref 1447 drivers/gpu/drm/drm_vblank.c return (seq - ref) <= (1 << 23);
ref 827 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 24000;
ref 869 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
ref 1215 drivers/gpu/drm/i915/display/intel_cdclk.c return dev_priv->cdclk.hw.ref * ratio;
ref 1236 drivers/gpu/drm/i915/display/intel_cdclk.c return dev_priv->cdclk.hw.ref * ratio;
ref 1244 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 19200;
ref 1255 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->vco = (val & BXT_DE_PLL_RATIO_MASK) * cdclk_state->ref;
ref 1266 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
ref 1317 drivers/gpu/drm/i915/display/intel_cdclk.c int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
ref 1534 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 24000;
ref 1536 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 19200;
ref 1547 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->vco = (val & CNL_CDCLK_PLL_RATIO_MASK) * cdclk_state->ref;
ref 1558 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
ref 1605 drivers/gpu/drm/i915/display/intel_cdclk.c int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
ref 1697 drivers/gpu/drm/i915/display/intel_cdclk.c ratio = dev_priv->cdclk.hw.ref == 19200 ? 35 : 28;
ref 1700 drivers/gpu/drm/i915/display/intel_cdclk.c ratio = dev_priv->cdclk.hw.ref == 19200 ? 55 : 44;
ref 1704 drivers/gpu/drm/i915/display/intel_cdclk.c return dev_priv->cdclk.hw.ref * ratio;
ref 1749 drivers/gpu/drm/i915/display/intel_cdclk.c static int icl_calc_cdclk(int min_cdclk, unsigned int ref)
ref 1756 drivers/gpu/drm/i915/display/intel_cdclk.c switch (ref) {
ref 1758 drivers/gpu/drm/i915/display/intel_cdclk.c MISSING_CASE(ref);
ref 1795 drivers/gpu/drm/i915/display/intel_cdclk.c WARN_ON(dev_priv->cdclk.hw.ref != 19200 &&
ref 1796 drivers/gpu/drm/i915/display/intel_cdclk.c dev_priv->cdclk.hw.ref != 38400);
ref 1802 drivers/gpu/drm/i915/display/intel_cdclk.c WARN_ON(dev_priv->cdclk.hw.ref != 24000);
ref 1805 drivers/gpu/drm/i915/display/intel_cdclk.c WARN_ON(dev_priv->cdclk.hw.ref != 19200 &&
ref 1806 drivers/gpu/drm/i915/display/intel_cdclk.c dev_priv->cdclk.hw.ref != 38400 &&
ref 1807 drivers/gpu/drm/i915/display/intel_cdclk.c dev_priv->cdclk.hw.ref != 24000);
ref 1811 drivers/gpu/drm/i915/display/intel_cdclk.c ratio = cdclk / (dev_priv->cdclk.hw.ref / 2);
ref 1813 drivers/gpu/drm/i915/display/intel_cdclk.c return dev_priv->cdclk.hw.ref * ratio;
ref 1893 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 24000;
ref 1896 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 19200;
ref 1899 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref = 38400;
ref 1915 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->vco = (val & BXT_DE_PLL_RATIO_MASK) * cdclk_state->ref;
ref 1958 drivers/gpu/drm/i915/display/intel_cdclk.c sanitized_state.ref = dev_priv->cdclk.hw.ref;
ref 1959 drivers/gpu/drm/i915/display/intel_cdclk.c sanitized_state.cdclk = icl_calc_cdclk(0, sanitized_state.ref);
ref 2064 drivers/gpu/drm/i915/display/intel_cdclk.c a->ref != b->ref;
ref 2086 drivers/gpu/drm/i915/display/intel_cdclk.c a->ref == b->ref;
ref 2129 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk_state->ref, cdclk_state->bypass,
ref 2565 drivers/gpu/drm/i915/display/intel_cdclk.c unsigned int ref = state->cdclk.logical.ref;
ref 2572 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk = icl_calc_cdclk(min_cdclk, ref);
ref 2582 drivers/gpu/drm/i915/display/intel_cdclk.c cdclk = icl_calc_cdclk(state->cdclk.force_min_cdclk, ref);
ref 2624 drivers/gpu/drm/i915/display/intel_cdclk.c if (dev_priv->cdclk.hw.ref == 24000)
ref 2629 drivers/gpu/drm/i915/display/intel_cdclk.c if (dev_priv->cdclk.hw.ref == 24000)
ref 1414 drivers/gpu/drm/i915/display/intel_ddi.c ref_clock = dev_priv->cdclk.hw.ref;
ref 2259 drivers/gpu/drm/i915/display/intel_dpll_mgr.c int ref_clock = dev_priv->cdclk.hw.ref;
ref 2543 drivers/gpu/drm/i915/display/intel_dpll_mgr.c dev_priv->cdclk.hw.ref == 24000 ?
ref 2565 drivers/gpu/drm/i915/display/intel_dpll_mgr.c *pll_params = dev_priv->cdclk.hw.ref == 24000 ?
ref 2701 drivers/gpu/drm/i915/display/intel_dpll_mgr.c int refclk_khz = dev_priv->cdclk.hw.ref;
ref 3087 drivers/gpu/drm/i915/display/intel_dpll_mgr.c if (dev_priv->cdclk.hw.ref == 38400) {
ref 200 drivers/gpu/drm/i915/display/intel_frontbuffer.c static int frontbuffer_active(struct i915_active *ref)
ref 203 drivers/gpu/drm/i915/display/intel_frontbuffer.c container_of(ref, typeof(*front), write);
ref 205 drivers/gpu/drm/i915/display/intel_frontbuffer.c kref_get(&front->ref);
ref 209 drivers/gpu/drm/i915/display/intel_frontbuffer.c static void frontbuffer_retire(struct i915_active *ref)
ref 212 drivers/gpu/drm/i915/display/intel_frontbuffer.c container_of(ref, typeof(*front), write);
ref 218 drivers/gpu/drm/i915/display/intel_frontbuffer.c static void frontbuffer_release(struct kref *ref)
ref 222 drivers/gpu/drm/i915/display/intel_frontbuffer.c container_of(ref, typeof(*front), ref);
ref 240 drivers/gpu/drm/i915/display/intel_frontbuffer.c kref_get(&front->ref);
ref 250 drivers/gpu/drm/i915/display/intel_frontbuffer.c kref_init(&front->ref);
ref 259 drivers/gpu/drm/i915/display/intel_frontbuffer.c kref_get(&front->ref);
ref 271 drivers/gpu/drm/i915/display/intel_frontbuffer.c kref_put_lock(&front->ref,
ref 44 drivers/gpu/drm/i915/display/intel_frontbuffer.h struct kref ref;
ref 372 drivers/gpu/drm/i915/gem/i915_gem_context.c void i915_gem_context_release(struct kref *ref)
ref 374 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_gem_context *ctx = container_of(ref, typeof(*ctx), ref);
ref 418 drivers/gpu/drm/i915/gem/i915_gem_context.c kref_init(&ctx->ref);
ref 1982 drivers/gpu/drm/i915/gem/i915_gem_context.c if (!kref_get_unless_zero(&vm->ref))
ref 167 drivers/gpu/drm/i915/gem/i915_gem_context.h kref_get(&ctx->ref);
ref 173 drivers/gpu/drm/i915/gem/i915_gem_context.h kref_put(&ctx->ref, i915_gem_context_release);
ref 125 drivers/gpu/drm/i915/gem/i915_gem_context_types.h struct kref ref;
ref 22 drivers/gpu/drm/i915/gem/selftests/mock_context.c kref_init(&ctx->ref);
ref 224 drivers/gpu/drm/i915/gt/intel_context.c kref_init(&ce->ref);
ref 116 drivers/gpu/drm/i915/gt/intel_context.h kref_get(&ce->ref);
ref 122 drivers/gpu/drm/i915/gt/intel_context.h kref_put(&ce->ref, ce->ops->destroy);
ref 39 drivers/gpu/drm/i915/gt/intel_context_types.h struct kref ref;
ref 215 drivers/gpu/drm/i915/gt/intel_engine.h void intel_ring_free(struct kref *ref);
ref 219 drivers/gpu/drm/i915/gt/intel_engine.h kref_get(&ring->ref);
ref 225 drivers/gpu/drm/i915/gt/intel_engine.h kref_put(&ring->ref, intel_ring_free);
ref 42 drivers/gpu/drm/i915/gt/intel_engine_pool.c static int pool_active(struct i915_active *ref)
ref 45 drivers/gpu/drm/i915/gt/intel_engine_pool.c container_of(ref, typeof(*node), active);
ref 64 drivers/gpu/drm/i915/gt/intel_engine_pool.c static void pool_retire(struct i915_active *ref)
ref 67 drivers/gpu/drm/i915/gt/intel_engine_pool.c container_of(ref, typeof(*node), active);
ref 88 drivers/gpu/drm/i915/gt/intel_engine_types.h struct kref ref;
ref 1713 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_context *ce = container_of(kref, typeof(*ce), ref);
ref 3421 drivers/gpu/drm/i915/gt/intel_lrc.c container_of(kref, typeof(*ve), context.ref);
ref 1312 drivers/gpu/drm/i915/gt/intel_ringbuffer.c kref_init(&ring->ref);
ref 1337 drivers/gpu/drm/i915/gt/intel_ringbuffer.c void intel_ring_free(struct kref *ref)
ref 1339 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct intel_ring *ring = container_of(ref, typeof(*ring), ref);
ref 1352 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static void ring_context_destroy(struct kref *ref)
ref 1354 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct intel_context *ce = container_of(ref, typeof(*ce), ref);
ref 55 drivers/gpu/drm/i915/gt/mock_engine.c kref_init(&ring->ref);
ref 116 drivers/gpu/drm/i915/gt/mock_engine.c static void mock_context_destroy(struct kref *ref)
ref 118 drivers/gpu/drm/i915/gt/mock_engine.c struct intel_context *ce = container_of(ref, typeof(*ce), ref);
ref 1856 drivers/gpu/drm/i915/gvt/gtt.c kref_init(&mm->ref);
ref 1952 drivers/gpu/drm/i915/gvt/gtt.c struct intel_vgpu_mm *mm = container_of(mm_ref, typeof(*mm), ref);
ref 146 drivers/gpu/drm/i915/gvt/gtt.h struct kref ref;
ref 176 drivers/gpu/drm/i915/gvt/gtt.h kref_get(&mm->ref);
ref 183 drivers/gpu/drm/i915/gvt/gtt.h kref_put(&mm->ref, _intel_vgpu_mm_release);
ref 108 drivers/gpu/drm/i915/gvt/kvmgt.c struct kref ref;
ref 270 drivers/gpu/drm/i915/gvt/kvmgt.c kref_init(&new->ref);
ref 1922 drivers/gpu/drm/i915/gvt/kvmgt.c kref_get(&entry->ref);
ref 1936 drivers/gpu/drm/i915/gvt/kvmgt.c static void __gvt_dma_release(struct kref *ref)
ref 1938 drivers/gpu/drm/i915/gvt/kvmgt.c struct gvt_dma *entry = container_of(ref, typeof(*entry), ref);
ref 1958 drivers/gpu/drm/i915/gvt/kvmgt.c kref_put(&entry->ref, __gvt_dma_release);
ref 15 drivers/gpu/drm/i915/i915_active.c #define BKL(ref) (&(ref)->i915->drm.struct_mutex)
ref 31 drivers/gpu/drm/i915/i915_active.c struct i915_active *ref;
ref 78 drivers/gpu/drm/i915/i915_active.c struct i915_active *ref = addr;
ref 80 drivers/gpu/drm/i915/i915_active.c return (void *)ref->active ?: (void *)ref->retire ?: (void *)ref;
ref 88 drivers/gpu/drm/i915/i915_active.c static void debug_active_init(struct i915_active *ref)
ref 90 drivers/gpu/drm/i915/i915_active.c debug_object_init(ref, &active_debug_desc);
ref 93 drivers/gpu/drm/i915/i915_active.c static void debug_active_activate(struct i915_active *ref)
ref 95 drivers/gpu/drm/i915/i915_active.c debug_object_activate(ref, &active_debug_desc);
ref 98 drivers/gpu/drm/i915/i915_active.c static void debug_active_deactivate(struct i915_active *ref)
ref 100 drivers/gpu/drm/i915/i915_active.c debug_object_deactivate(ref, &active_debug_desc);
ref 103 drivers/gpu/drm/i915/i915_active.c static void debug_active_fini(struct i915_active *ref)
ref 105 drivers/gpu/drm/i915/i915_active.c debug_object_free(ref, &active_debug_desc);
ref 108 drivers/gpu/drm/i915/i915_active.c static void debug_active_assert(struct i915_active *ref)
ref 110 drivers/gpu/drm/i915/i915_active.c debug_object_assert_init(ref, &active_debug_desc);
ref 115 drivers/gpu/drm/i915/i915_active.c static inline void debug_active_init(struct i915_active *ref) { }
ref 116 drivers/gpu/drm/i915/i915_active.c static inline void debug_active_activate(struct i915_active *ref) { }
ref 117 drivers/gpu/drm/i915/i915_active.c static inline void debug_active_deactivate(struct i915_active *ref) { }
ref 118 drivers/gpu/drm/i915/i915_active.c static inline void debug_active_fini(struct i915_active *ref) { }
ref 119 drivers/gpu/drm/i915/i915_active.c static inline void debug_active_assert(struct i915_active *ref) { }
ref 124 drivers/gpu/drm/i915/i915_active.c __active_retire(struct i915_active *ref, bool lock)
ref 130 drivers/gpu/drm/i915/i915_active.c lockdep_assert_held(&ref->mutex);
ref 133 drivers/gpu/drm/i915/i915_active.c if (atomic_dec_and_test(&ref->count)) {
ref 134 drivers/gpu/drm/i915/i915_active.c debug_active_deactivate(ref);
ref 135 drivers/gpu/drm/i915/i915_active.c root = ref->tree;
ref 136 drivers/gpu/drm/i915/i915_active.c ref->tree = RB_ROOT;
ref 137 drivers/gpu/drm/i915/i915_active.c ref->cache = NULL;
ref 142 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex);
ref 152 drivers/gpu/drm/i915/i915_active.c if (ref->retire)
ref 153 drivers/gpu/drm/i915/i915_active.c ref->retire(ref);
ref 157 drivers/gpu/drm/i915/i915_active.c active_retire(struct i915_active *ref, bool lock)
ref 159 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(!atomic_read(&ref->count));
ref 160 drivers/gpu/drm/i915/i915_active.c if (atomic_add_unless(&ref->count, -1, 1))
ref 165 drivers/gpu/drm/i915/i915_active.c mutex_lock_nested(&ref->mutex, SINGLE_DEPTH_NESTING);
ref 166 drivers/gpu/drm/i915/i915_active.c __active_retire(ref, lock);
ref 172 drivers/gpu/drm/i915/i915_active.c active_retire(node_from_active(base)->ref, true);
ref 178 drivers/gpu/drm/i915/i915_active.c active_retire(node_from_active(base)->ref, false);
ref 182 drivers/gpu/drm/i915/i915_active.c active_instance(struct i915_active *ref, struct intel_timeline *tl)
ref 195 drivers/gpu/drm/i915/i915_active.c node = READ_ONCE(ref->cache);
ref 204 drivers/gpu/drm/i915/i915_active.c mutex_lock(&ref->mutex);
ref 205 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(i915_active_is_idle(ref));
ref 208 drivers/gpu/drm/i915/i915_active.c p = &ref->tree.rb_node;
ref 226 drivers/gpu/drm/i915/i915_active.c node->ref = ref;
ref 230 drivers/gpu/drm/i915/i915_active.c rb_insert_color(&node->node, &ref->tree);
ref 233 drivers/gpu/drm/i915/i915_active.c ref->cache = node;
ref 234 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex);
ref 241 drivers/gpu/drm/i915/i915_active.c struct i915_active *ref,
ref 242 drivers/gpu/drm/i915/i915_active.c int (*active)(struct i915_active *ref),
ref 243 drivers/gpu/drm/i915/i915_active.c void (*retire)(struct i915_active *ref),
ref 246 drivers/gpu/drm/i915/i915_active.c debug_active_init(ref);
ref 248 drivers/gpu/drm/i915/i915_active.c ref->i915 = i915;
ref 249 drivers/gpu/drm/i915/i915_active.c ref->flags = 0;
ref 250 drivers/gpu/drm/i915/i915_active.c ref->active = active;
ref 251 drivers/gpu/drm/i915/i915_active.c ref->retire = retire;
ref 252 drivers/gpu/drm/i915/i915_active.c ref->tree = RB_ROOT;
ref 253 drivers/gpu/drm/i915/i915_active.c ref->cache = NULL;
ref 254 drivers/gpu/drm/i915/i915_active.c init_llist_head(&ref->preallocated_barriers);
ref 255 drivers/gpu/drm/i915/i915_active.c atomic_set(&ref->count, 0);
ref 256 drivers/gpu/drm/i915/i915_active.c __mutex_init(&ref->mutex, "i915_active", key);
ref 259 drivers/gpu/drm/i915/i915_active.c static bool ____active_del_barrier(struct i915_active *ref,
ref 302 drivers/gpu/drm/i915/i915_active.c __active_del_barrier(struct i915_active *ref, struct active_node *node)
ref 304 drivers/gpu/drm/i915/i915_active.c return ____active_del_barrier(ref, node, barrier_to_engine(node));
ref 307 drivers/gpu/drm/i915/i915_active.c int i915_active_ref(struct i915_active *ref,
ref 317 drivers/gpu/drm/i915/i915_active.c err = i915_active_acquire(ref);
ref 321 drivers/gpu/drm/i915/i915_active.c active = active_instance(ref, tl);
ref 333 drivers/gpu/drm/i915/i915_active.c __active_del_barrier(ref, node_from_active(active));
ref 338 drivers/gpu/drm/i915/i915_active.c atomic_inc(&ref->count);
ref 340 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(!atomic_read(&ref->count));
ref 344 drivers/gpu/drm/i915/i915_active.c i915_active_release(ref);
ref 348 drivers/gpu/drm/i915/i915_active.c int i915_active_acquire(struct i915_active *ref)
ref 352 drivers/gpu/drm/i915/i915_active.c debug_active_assert(ref);
ref 353 drivers/gpu/drm/i915/i915_active.c if (atomic_add_unless(&ref->count, 1, 0))
ref 356 drivers/gpu/drm/i915/i915_active.c err = mutex_lock_interruptible(&ref->mutex);
ref 360 drivers/gpu/drm/i915/i915_active.c if (!atomic_read(&ref->count) && ref->active)
ref 361 drivers/gpu/drm/i915/i915_active.c err = ref->active(ref);
ref 363 drivers/gpu/drm/i915/i915_active.c debug_active_activate(ref);
ref 364 drivers/gpu/drm/i915/i915_active.c atomic_inc(&ref->count);
ref 367 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex);
ref 372 drivers/gpu/drm/i915/i915_active.c void i915_active_release(struct i915_active *ref)
ref 374 drivers/gpu/drm/i915/i915_active.c debug_active_assert(ref);
ref 375 drivers/gpu/drm/i915/i915_active.c active_retire(ref, true);
ref 378 drivers/gpu/drm/i915/i915_active.c static void __active_ungrab(struct i915_active *ref)
ref 380 drivers/gpu/drm/i915/i915_active.c clear_and_wake_up_bit(I915_ACTIVE_GRAB_BIT, &ref->flags);
ref 383 drivers/gpu/drm/i915/i915_active.c bool i915_active_trygrab(struct i915_active *ref)
ref 385 drivers/gpu/drm/i915/i915_active.c debug_active_assert(ref);
ref 387 drivers/gpu/drm/i915/i915_active.c if (test_and_set_bit(I915_ACTIVE_GRAB_BIT, &ref->flags))
ref 390 drivers/gpu/drm/i915/i915_active.c if (!atomic_add_unless(&ref->count, 1, 0)) {
ref 391 drivers/gpu/drm/i915/i915_active.c __active_ungrab(ref);
ref 398 drivers/gpu/drm/i915/i915_active.c void i915_active_ungrab(struct i915_active *ref)
ref 400 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(!test_bit(I915_ACTIVE_GRAB_BIT, &ref->flags));
ref 402 drivers/gpu/drm/i915/i915_active.c active_retire(ref, true);
ref 403 drivers/gpu/drm/i915/i915_active.c __active_ungrab(ref);
ref 406 drivers/gpu/drm/i915/i915_active.c int i915_active_wait(struct i915_active *ref)
ref 412 drivers/gpu/drm/i915/i915_active.c might_lock(&ref->mutex);
ref 414 drivers/gpu/drm/i915/i915_active.c if (i915_active_is_idle(ref))
ref 417 drivers/gpu/drm/i915/i915_active.c err = mutex_lock_interruptible(&ref->mutex);
ref 421 drivers/gpu/drm/i915/i915_active.c if (!atomic_add_unless(&ref->count, 1, 0)) {
ref 422 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex);
ref 426 drivers/gpu/drm/i915/i915_active.c rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) {
ref 432 drivers/gpu/drm/i915/i915_active.c err = i915_active_request_retire(&it->base, BKL(ref),
ref 438 drivers/gpu/drm/i915/i915_active.c __active_retire(ref, true);
ref 442 drivers/gpu/drm/i915/i915_active.c if (wait_on_bit(&ref->flags, I915_ACTIVE_GRAB_BIT, TASK_KILLABLE))
ref 445 drivers/gpu/drm/i915/i915_active.c if (!i915_active_is_idle(ref))
ref 460 drivers/gpu/drm/i915/i915_active.c int i915_request_await_active(struct i915_request *rq, struct i915_active *ref)
ref 465 drivers/gpu/drm/i915/i915_active.c if (RB_EMPTY_ROOT(&ref->tree))
ref 469 drivers/gpu/drm/i915/i915_active.c err = i915_active_acquire(ref);
ref 473 drivers/gpu/drm/i915/i915_active.c mutex_lock(&ref->mutex);
ref 474 drivers/gpu/drm/i915/i915_active.c rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) {
ref 479 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex);
ref 481 drivers/gpu/drm/i915/i915_active.c i915_active_release(ref);
ref 486 drivers/gpu/drm/i915/i915_active.c void i915_active_fini(struct i915_active *ref)
ref 488 drivers/gpu/drm/i915/i915_active.c debug_active_fini(ref);
ref 489 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(!RB_EMPTY_ROOT(&ref->tree));
ref 490 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(atomic_read(&ref->count));
ref 491 drivers/gpu/drm/i915/i915_active.c mutex_destroy(&ref->mutex);
drivers/gpu/drm/i915/i915_active.c mutex_destroy(&ref->mutex); ref 500 drivers/gpu/drm/i915/i915_active.c static struct active_node *reuse_idle_barrier(struct i915_active *ref, u64 idx) ref 504 drivers/gpu/drm/i915/i915_active.c if (RB_EMPTY_ROOT(&ref->tree)) ref 507 drivers/gpu/drm/i915/i915_active.c mutex_lock(&ref->mutex); ref 508 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(i915_active_is_idle(ref)); ref 517 drivers/gpu/drm/i915/i915_active.c if (ref->cache && is_idle_barrier(ref->cache, idx)) { ref 518 drivers/gpu/drm/i915/i915_active.c p = &ref->cache->node; ref 523 drivers/gpu/drm/i915/i915_active.c p = ref->tree.rb_node; ref 568 drivers/gpu/drm/i915/i915_active.c ____active_del_barrier(ref, node, engine)) ref 572 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex); ref 577 drivers/gpu/drm/i915/i915_active.c rb_erase(p, &ref->tree); /* Hide from waits and sibling allocations */ ref 578 drivers/gpu/drm/i915/i915_active.c if (p == &ref->cache->node) ref 579 drivers/gpu/drm/i915/i915_active.c ref->cache = NULL; ref 580 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex); ref 585 drivers/gpu/drm/i915/i915_active.c int i915_active_acquire_preallocate_barrier(struct i915_active *ref, ref 593 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(!llist_empty(&ref->preallocated_barriers)); ref 605 drivers/gpu/drm/i915/i915_active.c node = reuse_idle_barrier(ref, idx); ref 620 drivers/gpu/drm/i915/i915_active.c node->ref = ref; ref 635 drivers/gpu/drm/i915/i915_active.c atomic_inc(&ref->count); ref 639 drivers/gpu/drm/i915/i915_active.c llist_add(barrier_to_ll(node), &ref->preallocated_barriers); ref 646 drivers/gpu/drm/i915/i915_active.c llist_for_each_safe(pos, next, take_preallocated_barriers(ref)) { ref 649 drivers/gpu/drm/i915/i915_active.c atomic_dec(&ref->count); ref 657 drivers/gpu/drm/i915/i915_active.c void i915_active_acquire_barrier(struct i915_active *ref) ref 661 drivers/gpu/drm/i915/i915_active.c GEM_BUG_ON(i915_active_is_idle(ref)); ref 669 drivers/gpu/drm/i915/i915_active.c mutex_lock_nested(&ref->mutex, SINGLE_DEPTH_NESTING); ref 670 drivers/gpu/drm/i915/i915_active.c llist_for_each_safe(pos, next, take_preallocated_barriers(ref)) { ref 676 drivers/gpu/drm/i915/i915_active.c p = &ref->tree.rb_node; ref 689 drivers/gpu/drm/i915/i915_active.c rb_insert_color(&node->node, &ref->tree); ref 694 drivers/gpu/drm/i915/i915_active.c mutex_unlock(&ref->mutex); ref 362 drivers/gpu/drm/i915/i915_active.h struct i915_active *ref, ref 363 drivers/gpu/drm/i915/i915_active.h int (*active)(struct i915_active *ref), ref 364 drivers/gpu/drm/i915/i915_active.h void (*retire)(struct i915_active *ref), ref 366 drivers/gpu/drm/i915/i915_active.h #define i915_active_init(i915, ref, active, retire) do { \ ref 369 drivers/gpu/drm/i915/i915_active.h __i915_active_init(i915, ref, active, retire, &__key); \ ref 372 drivers/gpu/drm/i915/i915_active.h int i915_active_ref(struct i915_active *ref, ref 376 drivers/gpu/drm/i915/i915_active.h int i915_active_wait(struct i915_active *ref); ref 379 drivers/gpu/drm/i915/i915_active.h struct i915_active *ref); ref 383 drivers/gpu/drm/i915/i915_active.h int i915_active_acquire(struct i915_active *ref); ref 384 drivers/gpu/drm/i915/i915_active.h void i915_active_release(struct i915_active *ref); ref 385 drivers/gpu/drm/i915/i915_active.h void __i915_active_release_nested(struct i915_active *ref, int subclass); ref 387 drivers/gpu/drm/i915/i915_active.h bool i915_active_trygrab(struct i915_active *ref); ref 388 drivers/gpu/drm/i915/i915_active.h void 
i915_active_ungrab(struct i915_active *ref); ref 391 drivers/gpu/drm/i915/i915_active.h i915_active_is_idle(const struct i915_active *ref) ref 393 drivers/gpu/drm/i915/i915_active.h return !atomic_read(&ref->count); ref 397 drivers/gpu/drm/i915/i915_active.h void i915_active_fini(struct i915_active *ref); ref 399 drivers/gpu/drm/i915/i915_active.h static inline void i915_active_fini(struct i915_active *ref) { } ref 402 drivers/gpu/drm/i915/i915_active.h int i915_active_acquire_preallocate_barrier(struct i915_active *ref, ref 404 drivers/gpu/drm/i915/i915_active.h void i915_active_acquire_barrier(struct i915_active *ref); ref 57 drivers/gpu/drm/i915/i915_active_types.h int (*active)(struct i915_active *ref); ref 58 drivers/gpu/drm/i915/i915_active_types.h void (*retire)(struct i915_active *ref); ref 1278 drivers/gpu/drm/i915/i915_drv.h unsigned int cdclk, vco, ref, bypass; ref 2372 drivers/gpu/drm/i915/i915_drv.h if (ctx && !kref_get_unless_zero(&ctx->ref)) ref 536 drivers/gpu/drm/i915/i915_gem_gtt.c container_of(kref, struct i915_address_space, ref); ref 547 drivers/gpu/drm/i915/i915_gem_gtt.c kref_init(&vm->ref); ref 289 drivers/gpu/drm/i915/i915_gem_gtt.h struct kref ref; ref 569 drivers/gpu/drm/i915/i915_gem_gtt.h kref_get(&vm->ref); ref 577 drivers/gpu/drm/i915/i915_gem_gtt.h kref_put(&vm->ref, i915_vm_release); ref 923 drivers/gpu/drm/i915/i915_gpu_error.c container_of(error_ref, typeof(*error), ref); ref 1697 drivers/gpu/drm/i915/i915_gpu_error.c kref_init(&error->ref); ref 1764 drivers/gpu/drm/i915/i915_gpu_error.c __i915_gpu_state_free(&error->ref); ref 32 drivers/gpu/drm/i915/i915_gpu_error.h struct kref ref; ref 209 drivers/gpu/drm/i915/i915_gpu_error.h kref_get(&gpu->ref); ref 220 drivers/gpu/drm/i915/i915_gpu_error.h kref_put(&gpu->ref, __i915_gpu_state_free); ref 83 drivers/gpu/drm/i915/i915_vma.c static inline struct i915_vma *active_to_vma(struct i915_active *ref) ref 85 drivers/gpu/drm/i915/i915_vma.c return container_of(ref, typeof(struct i915_vma), active); ref 88 drivers/gpu/drm/i915/i915_vma.c static int __i915_vma_active(struct i915_active *ref) ref 90 drivers/gpu/drm/i915/i915_vma.c return i915_vma_tryget(active_to_vma(ref)) ? 
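The i915_gem_gtt.c and i915_gpu_error.c sites above are instances of the kernel's standard kref idiom: embed a struct kref, take references with kref_get(), and drop them with kref_put(), whose release callback recovers the outer object with container_of(). A minimal sketch follows; struct foo and foo_release are hypothetical names, not taken from the i915 code.

	#include <linux/kref.h>
	#include <linux/slab.h>

	struct foo {
		struct kref ref;	/* embedded refcount */
		/* ... payload ... */
	};

	/* Called only when the last reference is dropped. */
	static void foo_release(struct kref *ref)
	{
		struct foo *f = container_of(ref, struct foo, ref);

		kfree(f);
	}

	static struct foo *foo_create(void)
	{
		struct foo *f = kzalloc(sizeof(*f), GFP_KERNEL);

		if (f)
			kref_init(&f->ref);	/* refcount starts at 1 */
		return f;
	}

	/* Usage: kref_get(&f->ref) to share, kref_put(&f->ref, foo_release) to drop. */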
drivers/gpu/drm/i915/intel_pm.c: 8322
drivers/gpu/drm/i915/selftests/i915_active.c (live_active kref): 19, 25, 34, 36, 43, 70
drivers/gpu/drm/i915/selftests/lib_sw_fence.c (heap_fence kref): 87, 117, 122, 124, 135
drivers/gpu/drm/lima/lima_ctx.c: 39, 41
drivers/gpu/drm/msm/adreno/a5xx_gpu.c: 1289, 1304
drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c: 909, 925
drivers/gpu/drm/msm/adreno/adreno_gpu.c (msm_gpu_state kref): 528, 607, 618
drivers/gpu/drm/msm/dsi/pll/dsi_pll_14nm.c (VCO reference-clock rate): 263, 278, 279, 280
drivers/gpu/drm/msm/msm_gpu.h: 177, 188, 291, 301
drivers/gpu/drm/msm/msm_submitqueue.c (submitqueue kref): 13, 30, 70
drivers/gpu/drm/nouveau/include/nvif/if0004.h: 12
drivers/gpu/drm/nouveau/nouveau_bo.c (pin_refcnt): 482, 488, 489, 490
drivers/gpu/drm/nouveau/nouveau_bo.h (nouveau_bo_ref): 51, 59, 60, 61
drivers/gpu/drm/nouveau/nv04_fence.c: 65
drivers/gpu/drm/nouveau/nvkm/engine/sw/nv04.c (per-channel atomic_t ref): 36, 53, 94, 116
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c (reference-clock divider): 210, 212, 217
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c: 223, 225, 230
drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c: 54, 73
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv40.c: 47, 50, 66, 69
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c (PLL reference rate): 56, 74, 119, 164, 179
drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgf100.c: 141, 189, 191, 192, 193
drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c (ref flag on page-table ops): 502, 544, 556, 630, 661
drivers/gpu/drm/panfrost/panfrost_job.c: 253, 255
drivers/gpu/drm/radeon/rv740_dpm.c (reference-divider encoding): 35, 39, 42, 45, 48, 51, 54, 57, 61, 65
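Several lookup paths in this listing (i915_drv.h:2372, intel_pm.c:8322, msm_submitqueue.c:30, and later siw.h:616) guard the get with kref_get_unless_zero(), which succeeds only while the count is still non-zero. This is the standard way to take a reference out of a lookup structure that can race with teardown. A hedged sketch, reusing the hypothetical struct foo from above:

	/* Return the object with a reference held, or NULL if it is already
	 * dying. Assumes the caller holds whatever lock protects the lookup
	 * structure itself. */
	static struct foo *foo_lookup_get(struct foo *candidate)
	{
		if (candidate && kref_get_unless_zero(&candidate->ref))
			return candidate;
		return NULL;
	}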
drivers/gpu/drm/v3d/v3d_drv.h: 210
drivers/gpu/drm/v3d/v3d_gem.c (v3d_job kref free callbacks): 352, 354, 380, 382, 390, 433
drivers/gpu/drm/vc4/vc4_v3d.c: 340, 342
drivers/gpu/drm/vmwgfx/ttm_object.c (ttm_ref_object: hash lookup, kref lifecycle, release): 314, 325, 326, 333, 350, 370, 371, 381, 385, 386, 387, 391, 392, 393, 394, 395, 398, 401, 412, 413, 422, 424, 425, 429, 430, 431, 434, 435, 437, 438, 439, 447, 457, 458, 465, 480, 481
drivers/gpu/drm/zte/zx_vga.c (I2C reference clock): 401, 408
drivers/gpu/host1x/intr.c (opaque waiter ref cookie): 206, 218, 241, 242, 246, 248
drivers/gpu/host1x/intr.h: 71, 78
drivers/gpu/host1x/job.c (host1x_job kref): 47, 69, 74, 76, 83
drivers/gpu/host1x/syncpt.c: 209, 249, 297
drivers/hwmon/fschmd.c: 311, 313
drivers/hwmon/pc87360.c (IN_FROM_REG/IN_TO_REG reference-voltage macros): 149, 150, 151, 152
drivers/hwmon/w83793.c: 278, 280
drivers/iio/adc/ltc2497.c (vref regulator): 28, 116, 217, 218, 219, 221, 251, 263
drivers/iio/adc/ti-adc081c.c (vref regulator): 32, 57, 176, 177, 178, 180, 210, 222
drivers/iio/adc/ti-adc161s626.c (vdda regulator): 68, 151, 206, 207, 208, 228, 240
drivers/iio/dac/mcp4725.c (reference-mode bits): 392, 475, 477, 480
drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_core.c (reference sensor ODR): 996, 1000, 1004
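The ltc2497 and ti-adc081c entries show the usual IIO reference-voltage pattern: obtain a "vref" supply at probe, enable it, and read its voltage (in microvolts) when userspace asks for the channel scale. A reduced sketch under those assumptions; foo_adc, st->ref, and the 16-bit resolution are hypothetical, not taken verbatim from either driver:

	#include <linux/iio/iio.h>
	#include <linux/regulator/consumer.h>

	/* probe:
	 *   st->ref = devm_regulator_get(&client->dev, "vref");
	 *   if (IS_ERR(st->ref)) return PTR_ERR(st->ref);
	 *   ret = regulator_enable(st->ref);
	 */

	static int foo_adc_read_scale(struct foo_adc *st, int *val, int *val2)
	{
		int uv = regulator_get_voltage(st->ref);	/* microvolts */

		if (uv < 0)
			return uv;
		*val = uv / 1000;	/* millivolts */
		*val2 = 16;		/* hypothetical resolution in bits */
		return IIO_VAL_FRACTIONAL_LOG2;	/* scale = val / 2^val2 */
	}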
drivers/iio/industrialio-buffer.c (iio_buffer kref): 203, 1429, 1431, 1445, 1458
drivers/infiniband/core/rdma_core.c (ib_uobject kref): 47, 50, 53, 63, 299, 334, 615, 792
drivers/infiniband/core/sa_query.c (ib_sa_sm_ah kref): 66, 1147, 1305, 1315, 1326, 1338, 2234, 2281, 2306, 2417
drivers/infiniband/core/ucma.c (atomic_t context refcount with completion): 88, 156, 164, 216, 1547
drivers/infiniband/core/uverbs.h: 129, 139, 232
drivers/infiniband/core/uverbs_main.c (uverbs file and async event file krefs): 128, 131, 191, 194, 211, 372, 373, 546, 571, 572, 585, 595, 596, 1087, 1128, 1345, 1356
drivers/infiniband/hw/bnxt_re/ib_verbs.c (saved SGE for shadow QP): 2014, 2027, 2028, 2029, 2039, 2040, 2041
drivers/infiniband/hw/hfi1/affinity.c: 851
drivers/infiniband/hw/mlx4/mcg.c: 141, 143, 609, 624, 633, 639
drivers/infiniband/hw/qedr/qedr_iw_cm.c: 82, 84, 90, 92
drivers/infiniband/hw/qib/qib_iba7322.c (IRQ notifier kref release): 2805, 2808, 2821
drivers/infiniband/sw/rdmavt/cq.c: 328
drivers/infiniband/sw/rdmavt/mmap.c (rvt_mmap_info kref): 71, 74, 89, 96, 186
drivers/infiniband/sw/rdmavt/mmap.h: 54
drivers/infiniband/sw/rdmavt/mr.c (percpu_ref completion): 128, 130
drivers/infiniband/sw/rdmavt/qp.c: 1297, 1737
drivers/infiniband/sw/rdmavt/srq.c: 344
drivers/infiniband/sw/rxe/rxe_loc.h: 87, 93
drivers/infiniband/sw/rxe/rxe_mmap.c (rxe_mmap_info kref): 45, 47, 48, 70, 77, 174
drivers/infiniband/sw/rxe/rxe_queue.c: 204
drivers/infiniband/sw/siw/siw.h (QP kref helpers): 160, 433, 531, 616, 631, 636
drivers/infiniband/sw/siw/siw_cm.c (CEP kref): 172, 249, 251, 441, 442, 447
drivers/infiniband/sw/siw/siw_cm.h: 42
drivers/infiniband/sw/siw/siw_mem.c (siw_mem kref): 54, 107, 140, 142
drivers/infiniband/sw/siw/siw_mem.h: 25, 29
drivers/infiniband/sw/siw/siw_qp.c: 1304, 1312, 1314
drivers/infiniband/ulp/ipoib/ipoib.h (ipoib_ah kref): 424, 491
drivers/infiniband/ulp/ipoib/ipoib_ib.c: 67, 83
drivers/infiniband/ulp/ipoib/ipoib_main.c: 818, 984
drivers/infiniband/ulp/ipoib/ipoib_multicast.c: 823
drivers/iommu/amd_iommu.c (ir_data->ref pointer to the live IRTE): 3898, 4597, 4600, 4613, 4615, 4617, 4620
drivers/iommu/amd_iommu_init.c: 1995
drivers/iommu/amd_iommu_types.h: 877
drivers/lightnvm/core.c: 36, 505
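The ucma.c sites use a bare atomic_t as a completion-coupled refcount instead of kref: the context starts at 1, lookups use atomic_inc_not_zero() so a dying context cannot be revived, and the final put signals a completion that the destroy path waits on. A sketch of that shape, with hypothetical names:

	#include <linux/atomic.h>
	#include <linux/completion.h>

	struct ctx {
		atomic_t ref;
		struct completion comp;
	};

	static void ctx_init_ref(struct ctx *c)
	{
		atomic_set(&c->ref, 1);		/* creation reference */
		init_completion(&c->comp);
	}

	static bool ctx_get(struct ctx *c)
	{
		return atomic_inc_not_zero(&c->ref);	/* fails once teardown began */
	}

	static void ctx_put(struct ctx *c)
	{
		if (atomic_dec_and_test(&c->ref))
			complete(&c->comp);	/* destroyer does wait_for_completion() */
	}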
drivers/lightnvm/core.c (nvm_dev kref): 1124, 1126, 1173, 1184, 1190, 1202, 1224, 1232, 1250, 1253
drivers/lightnvm/pblk-core.c (pblk_line kref): 1202, 1441, 1663, 1665, 1671, 1673, 2083
drivers/lightnvm/pblk-gc.c: 50, 127, 262, 276, 434
drivers/lightnvm/pblk-init.c (global caches kref): 352, 356
drivers/lightnvm/pblk-map.c: 75
drivers/lightnvm/pblk-rb.c: 264
drivers/lightnvm/pblk-recovery.c (pad request kref): 151, 153, 169, 205, 261, 268, 279
drivers/lightnvm/pblk-write.c: 181
drivers/lightnvm/pblk.h: 128, 474, 808, 809
drivers/macintosh/windfarm.h: 41, 109
drivers/macintosh/windfarm_ad7417_sensor.c (sensor private kref): 27, 152, 154, 155, 164, 187, 268, 304
drivers/macintosh/windfarm_core.c (control/sensor krefs): 151, 222, 252, 260, 268, 281, 317, 355, 363
drivers/macintosh/windfarm_fcu_controls.c: 56, 71, 73, 82, 397, 528, 575
drivers/macintosh/windfarm_smu_sat.c: 28, 168, 170, 183, 211, 283, 310, 333
drivers/md/bcache/journal.c (journal ref atomic): 975, 979, 980, 981
drivers/md/dm-zoned-metadata.c (metadata block ref counting): 104, 302, 362, 363, 435, 518, 519, 806, 2365, 2383, 2384
drivers/md/dm-zoned-target.c (per-bio refcount_t): 23, 86, 137, 650
drivers/media/common/siano/smsdvb-debugfs.c: 338, 342
drivers/media/dvb-core/dvb_ca_en50221.c: 168, 172
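dm-zoned-target.c tracks per-bio chunk work with refcount_t, the saturating refcount type that lkdtm/refcount.c (further down this listing) exists to exercise: set to 1 when the bio context is created, incremented once per in-flight fragment, and the parent completed when the count drops to zero. A minimal sketch with hypothetical names:

	#include <linux/printk.h>
	#include <linux/refcount.h>

	struct bioctx {
		refcount_t ref;
		/* ... parent bio, target, ... */
	};

	static void bioctx_start(struct bioctx *b)
	{
		refcount_set(&b->ref, 1);	/* reference held by the submitter */
	}

	static void bioctx_get(struct bioctx *b)
	{
		refcount_inc(&b->ref);		/* one per cloned fragment */
	}

	static void bioctx_put(struct bioctx *b)
	{
		if (refcount_dec_and_test(&b->ref))
			pr_debug("last reference dropped; complete the parent bio here\n");
	}

Unlike atomic_t, refcount_t saturates instead of wrapping on overflow, which turns use-after-free refcount bugs into warnings rather than exploitable conditions.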
drivers/media/dvb-core/dvb_frontend.c (frontend kref): 145, 148
drivers/media/dvb-frontends/dib0090.c (gain reference value): 918, 970, 972, 974, 977, 999
drivers/media/dvb-frontends/ix2505v.c (PLL reference divider): 126, 144, 147
drivers/media/dvb-frontends/stv6110.c (reference frequency from mclk): 252, 299, 300
drivers/media/platform/rcar_jpu.c (reference queue format): 913, 922, 927, 931, 932
drivers/media/platform/vicodec/codec-fwht.c (reference frame in FWHT decode): 660, 667, 668, 679, 835, 844, 870, 911, 920, 921, 936, 937, 942, 943, 951, 952
drivers/media/platform/vicodec/codec-fwht.h: 146
drivers/media/usb/em28xx/em28xx-audio.c (device kref get/put): 309, 351, 919, 1013
drivers/media/usb/em28xx/em28xx-cards.c: 3397, 3399, 3973, 3976, 4037, 4040
drivers/media/usb/em28xx/em28xx-dvb.c: 1919, 1986
drivers/media/usb/em28xx/em28xx-input.c: 711, 871
drivers/media/usb/em28xx/em28xx-video.c (device and v4l2 krefs): 2123, 2125, 2190, 2191, 2250, 2254, 2326, 2328, 2547, 2882, 2911
drivers/media/usb/em28xx/em28xx.h: 548, 644, 784, 847
drivers/media/usb/uvc/uvc_driver.c (uvc_device kref): 1890, 1934, 2024, 2145, 2253, 2271
drivers/media/usb/uvc/uvc_queue.c (uvc_buffer kref): 140, 476, 478, 498
drivers/media/usb/uvc/uvc_video.c: 1140
drivers/media/usb/uvc/uvcvideo.h: 419, 661
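In the tuner and clock entries above (and in the nouveau clk files earlier), ref is a reference-clock rate fed into integer PLL arithmetic of the form fout = (fref * N / M) >> P (nv50.c:119) or a ratio such as div_factor = frequency * ref / 40 (ix2505v.c:147). A worked sketch of the nv50-style computation; the function name is hypothetical:

	#include <linux/math64.h>
	#include <linux/types.h>

	/* fout = (fref * N / M) >> P, all rates in kHz.
	 * E.g. ref = 27000 kHz, N = 100, M = 9, P = 1:
	 * (27000 * 100 / 9) >> 1 = 150000 kHz. */
	static u32 pll_out_khz(u32 ref, u32 N, u32 M, u32 P)
	{
		if (!M)
			return 0;
		/* widen before multiplying so ref * N cannot overflow 32 bits */
		return div_u64((u64)ref * N, M) >> P;
	}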
drivers/media/v4l2-core/v4l2-ctrls.c (v4l2_ctrl_ref bookkeeping: bucket lookup and caching, request p_req copies via new_to_req/req_to_new, control enumeration, helper binding): 37, 1881, 1883, 2006, 2008, 2010, 2011, 2015, 2017, 2019, 2020, 2022, 2150, 2167, 2168, 2169, 2196, 2199, 2202, 2203, 2204, 2207, 2217, 2232, 2233, 2234, 2236, 2237, 2238, 2245, 2249, 2252, 2258, 2260, 2270, 2316, 2317, 2320, 2324, 2682, 2691, 2692, 2938, 2947, 2967, 2968, 2971, 2972, 2973, 2974, 2978, 2979, 2985, 2986, 2987, 2988, 2992, 2993, 2998, 3001, 3109, 3121, 3122, 3126, 3231, 3233, 3307, 3330, 3331, 3335, 3336, 3345, 3366, 3452, 3486, 3488, 3491, 3730, 3857, 3870, 3883, 3886, 3895, 4093, 4108, 4109, 4114, 4121, 4125, 4129, 4130, 4132, 4151, 4174, 4175, 4177, 4178, 4187
drivers/media/v4l2-core/v4l2-device.c (v4l2_device kref): 25, 45, 48, 56
drivers/misc/fastrpc.c (map/channel/context krefs): 208, 212, 296, 300, 315, 322
drivers/misc/habanalabs/command_buffer.c: 33, 38
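The v4l2-ctrls.c block is dominated by struct v4l2_ctrl_ref bookkeeping; lines 2232-2238 show the core lookup: hash the control id into a bucket, walk the bucket's singly linked chain, and cache the hit on the handler for the common repeated-lookup case. Reduced to its shape below, with types trimmed and the cached-entry fast path omitted:

	/* Caller holds hdl->lock. */
	static struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id)
	{
		struct v4l2_ctrl_ref *ref;
		int bucket = id % hdl->nr_of_buckets;

		ref = hdl->buckets ? hdl->buckets[bucket] : NULL;
		while (ref && ref->ctrl->id != id)
			ref = ref->next;
		if (ref)
			hdl->cached = ref;	/* cache it! */
		return ref;
	}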
drivers/misc/habanalabs/command_submission.c: 17, 159, 161
drivers/misc/habanalabs/context.c: 45, 49
drivers/misc/habanalabs/device.c: 41, 47
drivers/misc/habanalabs/habanalabs.h: 1463
drivers/misc/habanalabs/memory.c: 242, 244
drivers/misc/lkdtm/refcount.c (refcount_t overflow/underflow/saturation tests): 17, 19, 27, 92, 94, 105, 127, 134, 140, 148, 193, 195, 207, 261, 263, 271
drivers/misc/mei/client.c: 54, 57
drivers/misc/mic/host/mic_smpt.c (SMPT entry ref_count arrays): 66, 79, 88, 126, 139, 146, 198, 204, 205, 209, 212, 214, 245, 261, 262, 266, 273, 279
drivers/misc/mic/scif/scif_mmap.c (vma private kref): 502, 505, 507, 532, 585, 632
drivers/misc/ocxl/link.c (ocxl_link kref): 81, 385, 435, 452, 454, 467
drivers/mtd/mtd_blkdevs.c (mtd_blktrans_dev kref): 31, 51, 60, 218, 247, 267, 393
drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c: 3060
drivers/net/ethernet/intel/i40e/i40e_main.c (empty IRQ affinity release): 3758
drivers/net/ethernet/intel/iavf/iavf_main.c: 404
drivers/net/ethernet/intel/ice/ice_main.c: 1579
drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c (TLS context refcount_t): 57, 65, 107
drivers/net/ethernet/qlogic/qede/qede_rdma.c: 89, 92
drivers/net/ethernet/rocker/rocker_ofdpa.c (group reference counting): 1498, 1526, 1529
drivers/net/phy/phylink.c (fwnode_reference_args for the SFP node): 568, 575, 585
drivers/net/usb/hso.c (hso_device kref): 258, 310, 1275, 1382, 2329, 3089, 3091
3112 drivers/net/usb/hso.c kref_put(&serial_table[i]->ref, hso_serial_ref_free); ref 927 drivers/net/wimax/i2400m/rx.c static void i2400m_rx_roq_destroy(struct kref *ref) ref 931 drivers/net/wimax/i2400m/rx.c = container_of(ref, struct i2400m, rx_roq_refcount); ref 496 drivers/net/wireless/ath/carl9170/carl9170.h struct kref ref; ref 262 drivers/net/wireless/ath/carl9170/tx.c static void carl9170_tx_release(struct kref *ref) ref 269 drivers/net/wireless/ath/carl9170/tx.c arinfo = container_of(ref, struct carl9170_tx_info, ref); ref 334 drivers/net/wireless/ath/carl9170/tx.c kref_get(&arinfo->ref); ref 342 drivers/net/wireless/ath/carl9170/tx.c return kref_put(&arinfo->ref, carl9170_tx_release); ref 1074 drivers/net/wireless/ath/carl9170/tx.c kref_init(&arinfo->ref); ref 3223 drivers/net/wireless/ath/wcn36xx/hal.h u8 ref; ref 505 drivers/net/wireless/broadcom/b43/phy_lp.c u32 crystalfreq, tmp, ref; ref 569 drivers/net/wireless/broadcom/b43/phy_lp.c ref = (1000 * lpphy->pdiv + 2 * crystalfreq) / (2000 * lpphy->pdiv); ref 570 drivers/net/wireless/broadcom/b43/phy_lp.c ref &= 0xFFFF; ref 572 drivers/net/wireless/broadcom/b43/phy_lp.c if (ref < freqdata_tab[i].freq) { ref 221 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c mvm->phy_ctxts[i].ref = 0; ref 1271 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c if (!mvm->phy_ctxts[i].ref) ref 3759 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c if (phy_ctxt->ref == 0 || mvmvif->phy_ctxt == phy_ctxt) ref 3762 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c if (phy_ctxt->ref && channel == phy_ctxt->channel) { ref 3795 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c if (mvmvif->phy_ctxt->ref == 1) { ref 3968 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c if (WARN_ONCE((phy_ctxt->ref > 1) && ref 3974 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c phy_ctxt->ref, changed)) ref 150 drivers/net/wireless/intel/iwlwifi/mvm/mvm.h u32 ref; ref 219 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c ctxt->ref); ref 236 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c ctxt->ref++; ref 282 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c ctxt->ref--; ref 289 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c if (ctxt->ref == 0) { ref 594 drivers/net/wireless/mediatek/mt76/mt7603/mac.c struct ieee80211_tx_rate *ref; ref 617 drivers/net/wireless/mediatek/mt76/mt7603/mac.c ref = &sta->rateset[rateset].probe_rate; ref 620 drivers/net/wireless/mediatek/mt76/mt7603/mac.c ref = &sta->rateset[rateset].rates[0]; ref 632 drivers/net/wireless/mediatek/mt76/mt7603/mac.c if ((ref->flags ^ rates[i].flags) & IEEE80211_TX_RC_SHORT_GI) ref 467 drivers/net/wireless/mediatek/mt76/mt7615/mac.c struct ieee80211_tx_rate *ref; ref 490 drivers/net/wireless/mediatek/mt76/mt7615/mac.c ref = &sta->rateset[rateset].probe_rate; ref 493 drivers/net/wireless/mediatek/mt76/mt7615/mac.c ref = &sta->rateset[rateset].rates[0]; ref 505 drivers/net/wireless/mediatek/mt76/mt7615/mac.c if ((ref->flags ^ rates[i].flags) & IEEE80211_TX_RC_SHORT_GI) ref 293 drivers/net/xen-netback/hash.c .source.u.ref = gref, ref 339 drivers/net/xen-netback/hash.c .source.u.ref = gref, ref 463 drivers/net/xen-netback/netback.c (*gopp_copy)->source.u.ref); ref 508 drivers/net/xen-netback/netback.c gop_map->ref); ref 981 drivers/net/xen-netback/netback.c queue->tx_copy_ops[*copy_ops].source.u.ref = txreq.gref; ref 187 drivers/net/xen-netback/rx.c op->source.u.ref = foreign->gref; ref 195 drivers/net/xen-netback/rx.c op->dest.u.ref = req->gref; ref 218 drivers/net/xen-netfront.c grant_ref_t ref = 
queue->grant_rx_ref[i]; ref 220 drivers/net/xen-netfront.c return ref; ref 297 drivers/net/xen-netfront.c grant_ref_t ref; ref 312 drivers/net/xen-netfront.c ref = gnttab_claim_grant_reference(&queue->gref_rx_head); ref 313 drivers/net/xen-netfront.c WARN_ON_ONCE(IS_ERR_VALUE((unsigned long)(int)ref)); ref 314 drivers/net/xen-netfront.c queue->grant_rx_ref[id] = ref; ref 319 drivers/net/xen-netfront.c gnttab_page_grant_foreign_access_ref_one(ref, ref 324 drivers/net/xen-netfront.c req->gref = ref; ref 434 drivers/net/xen-netfront.c grant_ref_t ref; ref 442 drivers/net/xen-netfront.c ref = gnttab_claim_grant_reference(&queue->gref_tx_head); ref 443 drivers/net/xen-netfront.c WARN_ON_ONCE(IS_ERR_VALUE((unsigned long)(int)ref)); ref 445 drivers/net/xen-netfront.c gnttab_grant_foreign_access_ref(ref, queue->info->xbdev->otherend_id, ref 450 drivers/net/xen-netfront.c queue->grant_tx_ref[id] = ref; ref 453 drivers/net/xen-netfront.c tx->gref = ref; ref 725 drivers/net/xen-netfront.c grant_ref_t ref) ref 731 drivers/net/xen-netfront.c queue->grant_rx_ref[new] = ref; ref 733 drivers/net/xen-netfront.c RING_GET_REQUEST(&queue->rx, queue->rx.req_prod_pvt)->gref = ref; ref 749 drivers/net/xen-netfront.c grant_ref_t ref; ref 773 drivers/net/xen-netfront.c ref = xennet_get_rx_ref(queue, cons); ref 774 drivers/net/xen-netfront.c xennet_move_rx_slot(queue, skb, ref); ref 790 drivers/net/xen-netfront.c grant_ref_t ref = xennet_get_rx_ref(queue, cons); ref 807 drivers/net/xen-netfront.c xennet_move_rx_slot(queue, skb, ref); ref 817 drivers/net/xen-netfront.c if (ref == GRANT_INVALID_REF) { ref 825 drivers/net/xen-netfront.c ret = gnttab_end_foreign_access_ref(ref, 0); ref 828 drivers/net/xen-netfront.c gnttab_release_grant_reference(&queue->gref_rx_head, ref); ref 845 drivers/net/xen-netfront.c ref = xennet_get_rx_ref(queue, cons + slots); ref 1153 drivers/net/xen-netfront.c int id, ref; ref 1165 drivers/net/xen-netfront.c ref = queue->grant_rx_ref[id]; ref 1166 drivers/net/xen-netfront.c if (ref == GRANT_INVALID_REF) ref 1175 drivers/net/xen-netfront.c gnttab_end_foreign_access(ref, 0, ref 1378 drivers/net/xen-netfront.c static void xennet_end_access(int ref, void *page) ref 1381 drivers/net/xen-netfront.c if (ref != GRANT_INVALID_REF) ref 1382 drivers/net/xen-netfront.c gnttab_end_foreign_access(ref, 0, (unsigned long)page); ref 313 drivers/nvdimm/pmem.c container_of(pgmap->ref, struct request_queue, q_usage_counter); ref 326 drivers/nvdimm/pmem.c container_of(pgmap->ref, struct request_queue, q_usage_counter); ref 406 drivers/nvdimm/pmem.c pmem->pgmap.ref = &q->q_usage_counter; ref 434 drivers/nvme/host/core.c static void nvme_free_ns_head(struct kref *ref) ref 437 drivers/nvme/host/core.c container_of(ref, struct nvme_ns_head, ref); ref 449 drivers/nvme/host/core.c kref_put(&head->ref, nvme_free_ns_head); ref 2050 drivers/nvme/host/core.c if (!kref_get_unless_zero(&head->ref)) ref 2496 drivers/nvme/host/core.c static void nvme_destroy_subsystem(struct kref *ref) ref 2499 drivers/nvme/host/core.c container_of(ref, struct nvme_subsystem, ref); ref 2512 drivers/nvme/host/core.c kref_put(&subsys->ref, nvme_destroy_subsystem); ref 2535 drivers/nvme/host/core.c if (!kref_get_unless_zero(&subsys->ref)) ref 2635 drivers/nvme/host/core.c kref_init(&subsys->ref); ref 3302 drivers/nvme/host/core.c if (h->ns_id == nsid && kref_get_unless_zero(&h->ref)) ref 3350 drivers/nvme/host/core.c kref_init(&head->ref); ref 3369 drivers/nvme/host/core.c kref_get(&ctrl->subsys->ref); ref 43 drivers/nvme/host/fabrics.c 
kref_get(&host->ref); ref 51 drivers/nvme/host/fabrics.c kref_init(&host->ref); ref 68 drivers/nvme/host/fabrics.c kref_init(&host->ref); ref 80 drivers/nvme/host/fabrics.c static void nvmf_host_destroy(struct kref *ref) ref 82 drivers/nvme/host/fabrics.c struct nvmf_host *host = container_of(ref, struct nvmf_host, ref); ref 94 drivers/nvme/host/fabrics.c kref_put(&host->ref, nvmf_host_destroy); ref 891 drivers/nvme/host/fabrics.c kref_get(&nvmf_default_host->ref); ref 28 drivers/nvme/host/fabrics.h struct kref ref; ref 110 drivers/nvme/host/fc.c struct kref ref; ref 124 drivers/nvme/host/fc.c struct kref ref; ref 154 drivers/nvme/host/fc.c struct kref ref; ref 223 drivers/nvme/host/fc.c nvme_fc_free_lport(struct kref *ref) ref 226 drivers/nvme/host/fc.c container_of(ref, struct nvme_fc_lport, ref); ref 250 drivers/nvme/host/fc.c kref_put(&lport->ref, nvme_fc_free_lport); ref 256 drivers/nvme/host/fc.c return kref_get_unless_zero(&lport->ref); ref 392 drivers/nvme/host/fc.c kref_init(&newrec->ref); ref 495 drivers/nvme/host/fc.c nvme_fc_free_rport(struct kref *ref) ref 498 drivers/nvme/host/fc.c container_of(ref, struct nvme_fc_rport, ref); ref 522 drivers/nvme/host/fc.c kref_put(&rport->ref, nvme_fc_free_rport); ref 528 drivers/nvme/host/fc.c return kref_get_unless_zero(&rport->ref); ref 700 drivers/nvme/host/fc.c kref_init(&newrec->ref); ref 2014 drivers/nvme/host/fc.c nvme_fc_ctrl_free(struct kref *ref) ref 2017 drivers/nvme/host/fc.c container_of(ref, struct nvme_fc_ctrl, ref); ref 2049 drivers/nvme/host/fc.c kref_put(&ctrl->ref, nvme_fc_ctrl_free); ref 2055 drivers/nvme/host/fc.c return kref_get_unless_zero(&ctrl->ref); ref 3094 drivers/nvme/host/fc.c kref_init(&ctrl->ref); ref 299 drivers/nvme/host/nvme.h struct kref ref; ref 340 drivers/nvme/host/nvme.h struct kref ref; ref 40 drivers/nvme/host/rdma.c struct kref ref; ref 58 drivers/nvme/host/rdma.c refcount_t ref; ref 330 drivers/nvme/host/rdma.c static void nvme_rdma_free_dev(struct kref *ref) ref 333 drivers/nvme/host/rdma.c container_of(ref, struct nvme_rdma_device, ref); ref 345 drivers/nvme/host/rdma.c kref_put(&dev->ref, nvme_rdma_free_dev); ref 350 drivers/nvme/host/rdma.c return kref_get_unless_zero(&dev->ref); ref 370 drivers/nvme/host/rdma.c kref_init(&ndev->ref); ref 1127 drivers/nvme/host/rdma.c if (refcount_dec_and_test(&req->ref)) ref 1273 drivers/nvme/host/rdma.c refcount_set(&req->ref, 2); /* send and recv completions */ ref 1338 drivers/nvme/host/rdma.c if (refcount_dec_and_test(&req->ref)) ref 1481 drivers/nvme/host/rdma.c if (refcount_dec_and_test(&req->ref)) ref 406 drivers/nvme/target/core.c percpu_ref_get(&ns->ref); ref 412 drivers/nvme/target/core.c static void nvmet_destroy_namespace(struct percpu_ref *ref) ref 414 drivers/nvme/target/core.c struct nvmet_ns *ns = container_of(ref, struct nvmet_ns, ref); ref 421 drivers/nvme/target/core.c percpu_ref_put(&ns->ref); ref 541 drivers/nvme/target/core.c ret = percpu_ref_init(&ns->ref, nvmet_destroy_namespace, ref 610 drivers/nvme/target/core.c percpu_ref_kill(&ns->ref); ref 613 drivers/nvme/target/core.c percpu_ref_exit(&ns->ref); ref 724 drivers/nvme/target/core.c percpu_ref_put(&req->sq->ref); ref 747 drivers/nvme/target/core.c static void nvmet_confirm_sq(struct percpu_ref *ref) ref 749 drivers/nvme/target/core.c struct nvmet_sq *sq = container_of(ref, struct nvmet_sq, ref); ref 762 drivers/nvme/target/core.c percpu_ref_kill_and_confirm(&sq->ref, nvmet_confirm_sq); ref 765 drivers/nvme/target/core.c percpu_ref_exit(&sq->ref); ref 774 drivers/nvme/target/core.c 
static void nvmet_sq_free(struct percpu_ref *ref) ref 776 drivers/nvme/target/core.c struct nvmet_sq *sq = container_of(ref, struct nvmet_sq, ref); ref 785 drivers/nvme/target/core.c ret = percpu_ref_init(&sq->ref, nvmet_sq_free, 0, GFP_KERNEL); ref 910 drivers/nvme/target/core.c if (unlikely(!percpu_ref_tryget_live(&sq->ref))) { ref 928 drivers/nvme/target/core.c percpu_ref_put(&req->sq->ref); ref 1110 drivers/nvme/target/core.c if (!kref_get_unless_zero(&ctrl->ref)) ref 1253 drivers/nvme/target/core.c kref_init(&ctrl->ref); ref 1322 drivers/nvme/target/core.c static void nvmet_ctrl_free(struct kref *ref) ref 1324 drivers/nvme/target/core.c struct nvmet_ctrl *ctrl = container_of(ref, struct nvmet_ctrl, ref); ref 1349 drivers/nvme/target/core.c kref_put(&ctrl->ref, nvmet_ctrl_free); ref 1372 drivers/nvme/target/core.c if (!kref_get_unless_zero(&nvmet_disc_subsys->ref)) ref 1381 drivers/nvme/target/core.c if (!kref_get_unless_zero(&p->subsys->ref)) ref 1424 drivers/nvme/target/core.c kref_init(&subsys->ref); ref 1434 drivers/nvme/target/core.c static void nvmet_subsys_free(struct kref *ref) ref 1437 drivers/nvme/target/core.c container_of(ref, struct nvmet_subsys, ref); ref 1457 drivers/nvme/target/core.c kref_put(&subsys->ref, nvmet_subsys_free); ref 100 drivers/nvme/target/fc.c struct kref ref; ref 135 drivers/nvme/target/fc.c struct kref ref; ref 145 drivers/nvme/target/fc.c struct kref ref; ref 615 drivers/nvme/target/fc.c kref_init(&queue->ref); ref 642 drivers/nvme/target/fc.c nvmet_fc_tgt_queue_free(struct kref *ref) ref 645 drivers/nvme/target/fc.c container_of(ref, struct nvmet_fc_tgt_queue, ref); ref 664 drivers/nvme/target/fc.c kref_put(&queue->ref, nvmet_fc_tgt_queue_free); ref 670 drivers/nvme/target/fc.c return kref_get_unless_zero(&queue->ref); ref 813 drivers/nvme/target/fc.c kref_init(&assoc->ref); ref 844 drivers/nvme/target/fc.c nvmet_fc_target_assoc_free(struct kref *ref) ref 847 drivers/nvme/target/fc.c container_of(ref, struct nvmet_fc_tgt_assoc, ref); ref 862 drivers/nvme/target/fc.c kref_put(&assoc->ref, nvmet_fc_target_assoc_free); ref 868 drivers/nvme/target/fc.c return kref_get_unless_zero(&assoc->ref); ref 1061 drivers/nvme/target/fc.c kref_init(&newrec->ref); ref 1094 drivers/nvme/target/fc.c nvmet_fc_free_tgtport(struct kref *ref) ref 1097 drivers/nvme/target/fc.c container_of(ref, struct nvmet_fc_tgtport, ref); ref 1123 drivers/nvme/target/fc.c kref_put(&tgtport->ref, nvmet_fc_free_tgtport); ref 1129 drivers/nvme/target/fc.c return kref_get_unless_zero(&tgtport->ref); ref 219 drivers/nvme/target/fcloop.c struct kref ref; ref 254 drivers/nvme/target/fcloop.c struct kref ref; ref 388 drivers/nvme/target/fcloop.c fcloop_tfcp_req_free(struct kref *ref) ref 391 drivers/nvme/target/fcloop.c container_of(ref, struct fcloop_fcpreq, ref); ref 399 drivers/nvme/target/fcloop.c kref_put(&tfcp_req->ref, fcloop_tfcp_req_free); ref 405 drivers/nvme/target/fcloop.c return kref_get_unless_zero(&tfcp_req->ref); ref 553 drivers/nvme/target/fcloop.c kref_init(&tfcp_req->ref); ref 797 drivers/nvme/target/fcloop.c fcloop_nport_free(struct kref *ref) ref 800 drivers/nvme/target/fcloop.c container_of(ref, struct fcloop_nport, ref); ref 813 drivers/nvme/target/fcloop.c kref_put(&nport->ref, fcloop_nport_free); ref 819 drivers/nvme/target/fcloop.c return kref_get_unless_zero(&nport->ref); ref 1041 drivers/nvme/target/fcloop.c kref_init(&newnport->ref); ref 54 drivers/nvme/target/nvmet.h struct percpu_ref ref; ref 98 drivers/nvme/target/nvmet.h struct percpu_ref ref; ref 185 
drivers/nvme/target/nvmet.h struct kref ref; ref 209 drivers/nvme/target/nvmet.h struct kref ref; ref 111 drivers/nvme/target/rdma.c struct kref ref; ref 899 drivers/nvme/target/rdma.c static void nvmet_rdma_free_dev(struct kref *ref) ref 902 drivers/nvme/target/rdma.c container_of(ref, struct nvmet_rdma_device, ref); ref 926 drivers/nvme/target/rdma.c kref_get_unless_zero(&ndev->ref)) ref 947 drivers/nvme/target/rdma.c kref_init(&ndev->ref); ref 1089 drivers/nvme/target/rdma.c kref_put(&dev->ref, nvmet_rdma_free_dev); ref 1306 drivers/nvme/target/rdma.c kref_put(&ndev->ref, nvmet_rdma_free_dev); ref 59 drivers/pci/hotplug/acpiphp.h struct kref ref; ref 112 drivers/pci/hotplug/acpiphp_glue.c kref_get(&bridge->ref); ref 117 drivers/pci/hotplug/acpiphp_glue.c kref_put(&bridge->ref, free_bridge); ref 150 drivers/pci/hotplug/acpiphp_glue.c bridge = container_of(kref, struct acpiphp_bridge, ref); ref 866 drivers/pci/hotplug/acpiphp_glue.c kref_init(&bridge->ref); ref 206 drivers/pci/p2pdma.c pgmap->ref); ref 668 drivers/pci/p2pdma.c struct percpu_ref *ref; ref 680 drivers/pci/p2pdma.c (void **) &ref); ref 684 drivers/pci/p2pdma.c if (unlikely(!percpu_ref_tryget_live(ref))) { ref 703 drivers/pci/p2pdma.c struct percpu_ref *ref; ref 706 drivers/pci/p2pdma.c (void **) &ref); ref 707 drivers/pci/p2pdma.c percpu_ref_put(ref); ref 30 drivers/pcmcia/cs_internal.h struct kref ref; ref 230 drivers/pcmcia/ds.c static void pcmcia_release_function(struct kref *ref) ref 232 drivers/pcmcia/ds.c struct config_t *c = container_of(ref, struct config_t, ref); ref 246 drivers/pcmcia/ds.c kref_put(&p_dev->function_config->ref, pcmcia_release_function); ref 540 drivers/pcmcia/ds.c kref_get(&p_dev->function_config->ref); ref 559 drivers/pcmcia/ds.c kref_init(&c->ref); ref 350 drivers/perf/xgene_pmu.c XGENE_PMU_EVENT_ATTR(ref-cmd-sent, 0x0b), ref 541 drivers/perf/xgene_pmu.c XGENE_PMU_EVENT_ATTR(ref-sent, 0x0a), ref 179 drivers/pinctrl/ti/pinctrl-ti-iodelay.c static inline u32 ti_iodelay_compute_dpe(u16 period, u16 ref, u16 delay, ref 185 drivers/pinctrl/ti/pinctrl-ti-iodelay.c m = 10 * (u64)period * (u64)ref; ref 776 drivers/power/supply/bq27xxx_battery.c #define BQ27XXX_DATA(ref, key, opt) { \ ref 779 drivers/power/supply/bq27xxx_battery.c .regs = ref##_regs, \ ref 780 drivers/power/supply/bq27xxx_battery.c .dm_regs = ref##_dm_regs, \ ref 781 drivers/power/supply/bq27xxx_battery.c .props = ref##_props, \ ref 782 drivers/power/supply/bq27xxx_battery.c .props_size = ARRAY_SIZE(ref##_props) } ref 130 drivers/rapidio/devices/rio_mport_cdev.c struct kref ref; /* refcount of vmas sharing the mapping */ ref 256 drivers/rapidio/devices/rio_mport_cdev.c static void mport_release_mapping(struct kref *ref); ref 384 drivers/rapidio/devices/rio_mport_cdev.c kref_init(&map->ref); ref 480 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&map->ref, mport_release_mapping); ref 570 drivers/rapidio/devices/rio_mport_cdev.c static void dma_req_free(struct kref *ref) ref 572 drivers/rapidio/devices/rio_mport_cdev.c struct mport_dma_req *req = container_of(ref, struct mport_dma_req, ref 588 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&req->map->ref, mport_release_mapping); ref 907 drivers/rapidio/devices/rio_mport_cdev.c kref_get(&map->ref); ref 1107 drivers/rapidio/devices/rio_mport_cdev.c kref_init(&map->ref); ref 1135 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&mapping->ref, mport_release_mapping); ref 1159 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&map->ref, mport_release_mapping); ref 1234 
drivers/rapidio/devices/rio_mport_cdev.c kref_init(&map->ref); ref 1311 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&mapping->ref, mport_release_mapping); ref 1346 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&map->ref, mport_release_mapping); ref 2044 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&map->ref, mport_release_mapping); ref 2142 drivers/rapidio/devices/rio_mport_cdev.c static void mport_release_mapping(struct kref *ref) ref 2145 drivers/rapidio/devices/rio_mport_cdev.c container_of(ref, struct rio_mport_mapping, ref); ref 2174 drivers/rapidio/devices/rio_mport_cdev.c kref_get(&map->ref); ref 2183 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&map->ref, mport_release_mapping); ref 2515 drivers/rapidio/devices/rio_mport_cdev.c kref_put(&map->ref, mport_release_mapping); ref 178 drivers/rapidio/rio_cm.c struct kref ref; /* channel refcount */ ref 225 drivers/rapidio/rio_cm.c static void riocm_ch_free(struct kref *ref); ref 290 drivers/rapidio/rio_cm.c kref_get(&ch->ref); ref 297 drivers/rapidio/rio_cm.c kref_put(&ch->ref, riocm_ch_free); ref 1319 drivers/rapidio/rio_cm.c kref_init(&ch->ref); ref 1363 drivers/rapidio/rio_cm.c static void riocm_ch_free(struct kref *ref) ref 1365 drivers/rapidio/rio_cm.c struct rio_channel *ch = container_of(ref, struct rio_channel, ref); ref 570 drivers/remoteproc/remoteproc_core.c void rproc_vdev_release(struct kref *ref) ref 572 drivers/remoteproc/remoteproc_core.c struct rproc_vdev *rvdev = container_of(ref, struct rproc_vdev, refcount); ref 30 drivers/remoteproc/remoteproc_internal.h void rproc_vdev_release(struct kref *ref); ref 240 drivers/rpmsg/qcom_glink_native.c static void qcom_glink_channel_release(struct kref *ref) ref 242 drivers/rpmsg/qcom_glink_native.c struct glink_channel *channel = container_of(ref, struct glink_channel, ref 346 drivers/s390/scsi/zfcp_aux.c kref_init(&adapter->ref); ref 443 drivers/s390/scsi/zfcp_aux.c void zfcp_adapter_release(struct kref *ref) ref 445 drivers/s390/scsi/zfcp_aux.c struct zfcp_adapter *adapter = container_of(ref, struct zfcp_adapter, ref 446 drivers/s390/scsi/zfcp_aux.c ref); ref 485 drivers/s390/scsi/zfcp_aux.c kref_get(&adapter->ref); ref 29 drivers/s390/scsi/zfcp_ccw.c kref_get(&adapter->ref); ref 39 drivers/s390/scsi/zfcp_ccw.c kref_put(&adapter->ref, zfcp_adapter_release); ref 172 drivers/s390/scsi/zfcp_ccw.c kref_get(&adapter->ref); ref 149 drivers/s390/scsi/zfcp_def.h struct kref ref; ref 257 drivers/s390/scsi/zfcp_erp.c kref_get(&adapter->ref); ref 1431 drivers/s390/scsi/zfcp_erp.c kref_put(&adapter->ref, zfcp_adapter_release); ref 1459 drivers/s390/scsi/zfcp_erp.c kref_get(&adapter->ref); ref 1530 drivers/s390/scsi/zfcp_erp.c kref_put(&adapter->ref, zfcp_adapter_release); ref 498 drivers/scsi/bnx2fc/bnx2fc.h void bnx2fc_cmd_release(struct kref *ref); ref 519 drivers/scsi/bnx2fc/bnx2fc_io.c void bnx2fc_cmd_release(struct kref *ref) ref 521 drivers/scsi/bnx2fc/bnx2fc_io.c struct bnx2fc_cmd *io_req = container_of(ref, ref 109 drivers/scsi/ch.c struct kref ref; ref 568 drivers/scsi/ch.c static void ch_destroy(struct kref *ref) ref 570 drivers/scsi/ch.c scsi_changer *ch = container_of(ref, scsi_changer, ref); ref 583 drivers/scsi/ch.c kref_put(&ch->ref, ch_destroy); ref 602 drivers/scsi/ch.c kref_get(&ch->ref); ref 950 drivers/scsi/ch.c kref_init(&ch->ref); ref 980 drivers/scsi/ch.c kref_put(&ch->ref, ch_destroy); ref 74 drivers/scsi/imm.c static void imm_wakeup(void *ref) ref 76 drivers/scsi/imm.c imm_struct *dev = (imm_struct *) ref; ref 455 
drivers/scsi/megaraid/megaraid_sas.h u32 ref; ref 559 drivers/scsi/megaraid/megaraid_sas.h union MR_PD_REF ref; ref 764 drivers/scsi/megaraid/megaraid_sas.h __le32 ref; ref 774 drivers/scsi/megaraid/megaraid_sas.h union MR_LD_REF ref; ref 2450 drivers/scsi/megaraid/megaraid_sas.h union MR_LD_REF ref; ref 2491 drivers/scsi/megaraid/megaraid_sas_base.c if (newmap->ref.targetId == ref 2492 drivers/scsi/megaraid/megaraid_sas_base.c savedmap->ref.targetId) { ref 2519 drivers/scsi/megaraid/megaraid_sas_base.c if (savedmap->ref.targetId == ref 2520 drivers/scsi/megaraid/megaraid_sas_base.c newmap->ref.targetId) { ref 4713 drivers/scsi/megaraid/megaraid_sas_base.c ids = ci->ldList[ld_index].ref.targetId; ref 4714 drivers/scsi/megaraid/megaraid_sas_base.c instance->ld_ids[ids] = ci->ldList[ld_index].ref.targetId; ref 64 drivers/scsi/ppa.c static void ppa_wakeup(void *ref) ref 66 drivers/scsi/ppa.c ppa_struct *dev = (ppa_struct *) ref; ref 490 drivers/scsi/qedf/qedf.h extern void qedf_release_cmd(struct kref *ref); ref 439 drivers/scsi/qedf/qedf_io.c void qedf_release_cmd(struct kref *ref) ref 442 drivers/scsi/qedf/qedf_io.c container_of(ref, struct qedf_ioreq, refcount); ref 72 drivers/scsi/qla4xxx/ql4_glbl.h void qla4xxx_srb_compl(struct kref *ref); ref 4047 drivers/scsi/qla4xxx/ql4_os.c void qla4xxx_srb_compl(struct kref *ref) ref 4049 drivers/scsi/qla4xxx/ql4_os.c struct srb *srb = container_of(ref, struct srb, srb_ref); ref 398 drivers/scsi/xen-scsifront.c int err, ref, ref_cnt = 0; ref 442 drivers/scsi/xen-scsifront.c ref = gnttab_claim_grant_reference(&gref_head); ref 443 drivers/scsi/xen-scsifront.c BUG_ON(ref == -ENOSPC); ref 445 drivers/scsi/xen-scsifront.c gnttab_grant_foreign_access_ref(ref, ref 448 drivers/scsi/xen-scsifront.c shadow->gref[ref_cnt] = ref; ref 449 drivers/scsi/xen-scsifront.c shadow->seg[ref_cnt].gref = ref; ref 476 drivers/scsi/xen-scsifront.c ref = gnttab_claim_grant_reference(&gref_head); ref 477 drivers/scsi/xen-scsifront.c BUG_ON(ref == -ENOSPC); ref 479 drivers/scsi/xen-scsifront.c gnttab_grant_foreign_access_ref(ref, ref 484 drivers/scsi/xen-scsifront.c shadow->gref[ref_cnt] = ref; ref 485 drivers/scsi/xen-scsifront.c seg->gref = ref; ref 369 drivers/sh/clk/core.c kref_init(&mapping->ref); ref 378 drivers/sh/clk/core.c kref_get(&mapping->ref); ref 392 drivers/sh/clk/core.c mapping = container_of(kref, struct clk_mapping, ref); ref 405 drivers/sh/clk/core.c kref_put(&mapping->ref, clk_destroy_mapping); ref 134 drivers/soc/qcom/smem_state.c static void qcom_smem_state_release(struct kref *ref) ref 136 drivers/soc/qcom/smem_state.c struct qcom_smem_state *state = container_of(ref, struct qcom_smem_state, refcount); ref 51 drivers/staging/gasket/gasket_sysfs.c static void release_entry(struct kref *ref) ref 1228 drivers/staging/greybus/camera.c static void gb_camera_release_module(struct kref *ref) ref 1231 drivers/staging/greybus/camera.c container_of(ref, struct gb_camera_module, refcount); ref 376 drivers/staging/media/hantro/hantro_g1_vp8_dec.c dma_addr_t ref; ref 380 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref = hantro_get_ref(cap_q, hdr->last_frame_ts); ref 381 drivers/staging/media/hantro/hantro_g1_vp8_dec.c if (!ref) ref 382 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0); ref 383 drivers/staging/media/hantro/hantro_g1_vp8_dec.c vdpu_write_relaxed(vpu, ref, G1_REG_ADDR_REF(0)); ref 385 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref = hantro_get_ref(cap_q, hdr->golden_frame_ts); ref 386 
drivers/staging/media/hantro/hantro_g1_vp8_dec.c WARN_ON(!ref && hdr->golden_frame_ts); ref 387 drivers/staging/media/hantro/hantro_g1_vp8_dec.c if (!ref) ref 388 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0); ref 390 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref |= G1_REG_ADDR_REF_TOPC_E; ref 391 drivers/staging/media/hantro/hantro_g1_vp8_dec.c vdpu_write_relaxed(vpu, ref, G1_REG_ADDR_REF(4)); ref 393 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref = hantro_get_ref(cap_q, hdr->alt_frame_ts); ref 394 drivers/staging/media/hantro/hantro_g1_vp8_dec.c WARN_ON(!ref && hdr->alt_frame_ts); ref 395 drivers/staging/media/hantro/hantro_g1_vp8_dec.c if (!ref) ref 396 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0); ref 398 drivers/staging/media/hantro/hantro_g1_vp8_dec.c ref |= G1_REG_ADDR_REF_TOPC_E; ref 399 drivers/staging/media/hantro/hantro_g1_vp8_dec.c vdpu_write_relaxed(vpu, ref, G1_REG_ADDR_REF(5)); ref 453 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c dma_addr_t ref; ref 458 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref = hantro_get_ref(cap_q, hdr->last_frame_ts); ref 459 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c if (!ref) ref 460 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0); ref 461 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF0); ref 463 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref = hantro_get_ref(cap_q, hdr->golden_frame_ts); ref 464 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c WARN_ON(!ref && hdr->golden_frame_ts); ref 465 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c if (!ref) ref 466 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0); ref 468 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref |= VDPU_REG_VP8_GREF_SIGN_BIAS; ref 469 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF2_5(2)); ref 471 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref = hantro_get_ref(cap_q, hdr->alt_frame_ts); ref 472 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c WARN_ON(!ref && hdr->alt_frame_ts); ref 473 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c if (!ref) ref 474 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0); ref 476 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c ref |= VDPU_REG_VP8_AREF_SIGN_BIAS; ref 477 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF2_5(3)); ref 57 drivers/staging/media/ipu3/ipu3-css-fw.h struct imgu_fw_isp_parameter ref; ref 65 drivers/staging/media/ipu3/ipu3-css-fw.h struct imgu_fw_isp_parameter ref; ref 795 drivers/staging/media/ipu3/ipu3-css.c &cofs->dmem.ref, ref 872 drivers/staging/media/ipu3/ipu3-css.c &sofs->dmem.ref, ref 92 drivers/staging/media/tegra-vde/dmabuf-cache.c goto ref; ref 140 drivers/staging/media/tegra-vde/dmabuf-cache.c ref: ref 1955 drivers/target/iscsi/cxgbit/cxgbit_cm.c bool ref = true; ref 1960 drivers/target/iscsi/cxgbit/cxgbit_cm.c ref = false; ref 1984 drivers/target/iscsi/cxgbit/cxgbit_cm.c if (ref) ref 438 drivers/target/target_core_tpg.c static void core_tpg_lun_ref_release(struct percpu_ref *ref) ref 440 drivers/target/target_core_tpg.c struct se_lun *lun = container_of(ref, 
struct se_lun, lun_ref); ref 214 drivers/target/target_core_transport.c static void target_release_sess_cmd_refcnt(struct percpu_ref *ref) ref 216 drivers/target/target_core_transport.c struct se_session *sess = container_of(ref, typeof(*sess), cmd_count); ref 68 drivers/tee/tee_core.c static void teedev_ctx_release(struct kref *ref) ref 70 drivers/tee/tee_core.c struct tee_context *ctx = container_of(ref, struct tee_context, ref 364 drivers/thermal/samsung/exynos_tmu.c u8 ref, th_code; ref 366 drivers/thermal/samsung/exynos_tmu.c ref = trips[0].temperature / MCELSIUS; ref 369 drivers/thermal/samsung/exynos_tmu.c th_code = temp_to_code(data, ref); ref 373 drivers/thermal/samsung/exynos_tmu.c temp -= ref; ref 1441 drivers/thermal/thermal_core.c struct thermal_zone_device *pos = NULL, *ref = ERR_PTR(-EINVAL); ref 1451 drivers/thermal/thermal_core.c ref = pos; ref 1457 drivers/thermal/thermal_core.c ref = ERR_PTR(-ENODEV); ref 1460 drivers/thermal/thermal_core.c ref = ERR_PTR(-EEXIST); ref 1463 drivers/thermal/thermal_core.c return ref; ref 370 drivers/tty/hvc/hvc_xen.c int ret, evtchn, devid, ref, irq; ref 391 drivers/tty/hvc/hvc_xen.c ref = gnttab_claim_grant_reference(&gref_head); ref 392 drivers/tty/hvc/hvc_xen.c if (ref < 0) ref 393 drivers/tty/hvc/hvc_xen.c return ref; ref 394 drivers/tty/hvc/hvc_xen.c gnttab_grant_foreign_access_ref(ref, info->xbdev->otherend_id, ref 403 drivers/tty/hvc/hvc_xen.c ret = xenbus_printf(xbt, dev->nodename, "ring-ref", "%d", ref); ref 84 drivers/tty/n_gsm.c struct kref ref; ref 186 drivers/tty/n_gsm.c struct kref ref; ref 2158 drivers/tty/n_gsm.c static void gsm_free_muxr(struct kref *ref) ref 2160 drivers/tty/n_gsm.c struct gsm_mux *gsm = container_of(ref, struct gsm_mux, ref); ref 2166 drivers/tty/n_gsm.c kref_get(&gsm->ref); ref 2171 drivers/tty/n_gsm.c kref_put(&gsm->ref, gsm_free_muxr); ref 2208 drivers/tty/n_gsm.c kref_init(&gsm->ref); ref 2665 drivers/tty/n_gsm.c static void net_free(struct kref *ref) ref 2670 drivers/tty/n_gsm.c mux_net = container_of(ref, struct gsm_mux_net, ref); ref 2681 drivers/tty/n_gsm.c kref_get(&mux_net->ref); ref 2686 drivers/tty/n_gsm.c kref_put(&mux_net->ref, net_free); ref 2817 drivers/tty/n_gsm.c kref_init(&mux_net->ref); ref 488 drivers/usb/core/config.c void usb_release_interface_cache(struct kref *ref) ref 490 drivers/usb/core/config.c struct usb_interface_cache *intfc = ref_to_usb_interface_cache(ref); ref 780 drivers/usb/core/config.c kref_init(&intfc->ref); ref 850 drivers/usb/core/config.c kref_put(&cf->intf_cache[i]->ref, ref 1633 drivers/usb/core/message.c kref_put(&intfc->ref, usb_release_interface_cache); ref 1928 drivers/usb/core/message.c kref_get(&intfc->ref); ref 31 drivers/usb/core/usb.h extern void usb_release_interface_cache(struct kref *ref); ref 1699 drivers/usb/gadget/function/f_fs.c refcount_inc(&ffs->ref); ref 1706 drivers/usb/gadget/function/f_fs.c refcount_inc(&ffs->ref); ref 1718 drivers/usb/gadget/function/f_fs.c if (unlikely(refcount_dec_and_test(&ffs->ref))) { ref 1771 drivers/usb/gadget/function/f_fs.c refcount_set(&ffs->ref, 1); ref 76 drivers/usb/gadget/function/tcm.h struct kref ref; ref 179 drivers/usb/gadget/function/u_fs.h refcount_t ref; ref 2390 drivers/usb/host/isp1362-hcd.c u16 *ref; ref 2393 drivers/usb/host/isp1362-hcd.c ref = kmalloc(2 * ISP1362_BUF_SIZE, GFP_KERNEL); ref 2394 drivers/usb/host/isp1362-hcd.c if (ref) { ref 2396 drivers/usb/host/isp1362-hcd.c u16 *tst = &ref[ISP1362_BUF_SIZE / 2]; ref 2399 drivers/usb/host/isp1362-hcd.c ref[offset] = ~offset; ref 2408 
drivers/usb/host/isp1362-hcd.c isp1362_write_buffer(isp1362_hcd, (u8 *)ref + offset, 0, j); ref 2412 drivers/usb/host/isp1362-hcd.c if (memcmp(ref, tst, j)) { ref 2416 drivers/usb/host/isp1362-hcd.c dump_data((u8 *)ref + offset, j); ref 2423 drivers/usb/host/isp1362-hcd.c isp1362_write_buffer(isp1362_hcd, ref, 0, ISP1362_BUF_SIZE); ref 2427 drivers/usb/host/isp1362-hcd.c if (memcmp(ref, tst, ISP1362_BUF_SIZE)) { ref 2450 drivers/usb/host/isp1362-hcd.c isp1362_write_buffer(isp1362_hcd, ref, offset * 2, PTD_HEADER_SIZE); ref 2451 drivers/usb/host/isp1362-hcd.c isp1362_write_buffer(isp1362_hcd, ref + PTD_HEADER_SIZE / sizeof(*ref), ref 2456 drivers/usb/host/isp1362-hcd.c if (memcmp(ref, tst, PTD_HEADER_SIZE + test_size)) { ref 2457 drivers/usb/host/isp1362-hcd.c dump_data(((u8 *)ref) + offset, PTD_HEADER_SIZE + test_size); ref 2463 drivers/usb/host/isp1362-hcd.c if (memcmp(ref, tst, PTD_HEADER_SIZE + test_size)) { ref 2473 drivers/usb/host/isp1362-hcd.c kfree(ref); ref 57 drivers/usb/mon/mon_main.c kref_get(&mbus->ref); ref 76 drivers/usb/mon/mon_main.c kref_put(&mbus->ref, mon_bus_drop); ref 221 drivers/usb/mon/mon_main.c kref_put(&mbus->ref, mon_bus_drop); ref 273 drivers/usb/mon/mon_main.c struct mon_bus *mbus = container_of(r, struct mon_bus, ref); ref 290 drivers/usb/mon/mon_main.c kref_init(&mbus->ref); ref 317 drivers/usb/mon/mon_main.c kref_init(&mbus->ref); ref 413 drivers/usb/mon/mon_main.c kref_get(&mbus->ref); /* Force leak */ ref 417 drivers/usb/mon/mon_main.c kref_put(&mbus->ref, mon_bus_drop); ref 33 drivers/usb/mon/usb_mon.h struct kref ref; /* Under mon_lock */ ref 81 drivers/vfio/mdev/mdev_core.c ref); ref 91 drivers/vfio/mdev/mdev_core.c kref_get(&parent->ref); ref 99 drivers/vfio/mdev/mdev_core.c kref_put(&parent->ref, mdev_release_parent); ref 173 drivers/vfio/mdev/mdev_core.c kref_init(&parent->ref); ref 19 drivers/vfio/mdev/mdev_private.h struct kref ref; ref 483 drivers/xen/gntdev-dmabuf.c map->grants[i].ref = refs[i]; ref 284 drivers/xen/gntdev.c map->grants[pgnr].ref, ref 311 drivers/xen/gntdev.c map->grants[i].ref, ref 330 drivers/xen/gntdev.c map->grants[i].ref, ref 943 drivers/xen/gntdev.c op->source.u.ref = seg->source.foreign.ref; ref 962 drivers/xen/gntdev.c op->dest.u.ref = seg->dest.foreign.ref; ref 118 drivers/xen/grant-table.c void (*update_entry)(grant_ref_t ref, domid_t domid, ref 128 drivers/xen/grant-table.c int (*end_foreign_access_ref)(grant_ref_t ref, int readonly); ref 136 drivers/xen/grant-table.c unsigned long (*end_foreign_transfer_ref)(grant_ref_t ref); ref 143 drivers/xen/grant-table.c int (*query_foreign_access)(grant_ref_t ref); ref 173 drivers/xen/grant-table.c int ref, rc = 0; ref 184 drivers/xen/grant-table.c ref = head = gnttab_free_head; ref 193 drivers/xen/grant-table.c return ref; ref 222 drivers/xen/grant-table.c static void put_free_entry(grant_ref_t ref) ref 226 drivers/xen/grant-table.c gnttab_entry(ref) = gnttab_free_head; ref 227 drivers/xen/grant-table.c gnttab_free_head = ref; ref 244 drivers/xen/grant-table.c static void gnttab_update_entry_v1(grant_ref_t ref, domid_t domid, ref 247 drivers/xen/grant-table.c gnttab_shared.v1[ref].domid = domid; ref 248 drivers/xen/grant-table.c gnttab_shared.v1[ref].frame = frame; ref 250 drivers/xen/grant-table.c gnttab_shared.v1[ref].flags = flags; ref 253 drivers/xen/grant-table.c static void gnttab_update_entry_v2(grant_ref_t ref, domid_t domid, ref 256 drivers/xen/grant-table.c gnttab_shared.v2[ref].hdr.domid = domid; ref 257 drivers/xen/grant-table.c gnttab_shared.v2[ref].full_page.frame 
= frame; ref 259 drivers/xen/grant-table.c gnttab_shared.v2[ref].hdr.flags = GTF_permit_access | flags; ref 265 drivers/xen/grant-table.c void gnttab_grant_foreign_access_ref(grant_ref_t ref, domid_t domid, ref 268 drivers/xen/grant-table.c gnttab_interface->update_entry(ref, domid, frame, ref 276 drivers/xen/grant-table.c int ref; ref 278 drivers/xen/grant-table.c ref = get_free_entries(1); ref 279 drivers/xen/grant-table.c if (unlikely(ref < 0)) ref 282 drivers/xen/grant-table.c gnttab_grant_foreign_access_ref(ref, domid, frame, readonly); ref 284 drivers/xen/grant-table.c return ref; ref 288 drivers/xen/grant-table.c static int gnttab_query_foreign_access_v1(grant_ref_t ref) ref 290 drivers/xen/grant-table.c return gnttab_shared.v1[ref].flags & (GTF_reading|GTF_writing); ref 293 drivers/xen/grant-table.c static int gnttab_query_foreign_access_v2(grant_ref_t ref) ref 295 drivers/xen/grant-table.c return grstatus[ref] & (GTF_reading|GTF_writing); ref 298 drivers/xen/grant-table.c int gnttab_query_foreign_access(grant_ref_t ref) ref 300 drivers/xen/grant-table.c return gnttab_interface->query_foreign_access(ref); ref 304 drivers/xen/grant-table.c static int gnttab_end_foreign_access_ref_v1(grant_ref_t ref, int readonly) ref 309 drivers/xen/grant-table.c pflags = &gnttab_shared.v1[ref].flags; ref 320 drivers/xen/grant-table.c static int gnttab_end_foreign_access_ref_v2(grant_ref_t ref, int readonly) ref 322 drivers/xen/grant-table.c gnttab_shared.v2[ref].hdr.flags = 0; ref 324 drivers/xen/grant-table.c if (grstatus[ref] & (GTF_reading|GTF_writing)) { ref 343 drivers/xen/grant-table.c static inline int _gnttab_end_foreign_access_ref(grant_ref_t ref, int readonly) ref 345 drivers/xen/grant-table.c return gnttab_interface->end_foreign_access_ref(ref, readonly); ref 348 drivers/xen/grant-table.c int gnttab_end_foreign_access_ref(grant_ref_t ref, int readonly) ref 350 drivers/xen/grant-table.c if (_gnttab_end_foreign_access_ref(ref, readonly)) ref 352 drivers/xen/grant-table.c pr_warn("WARNING: g.e. %#x still in use!\n", ref); ref 359 drivers/xen/grant-table.c grant_ref_t ref; ref 384 drivers/xen/grant-table.c if (_gnttab_end_foreign_access_ref(entry->ref, entry->ro)) { ref 385 drivers/xen/grant-table.c put_free_entry(entry->ref); ref 388 drivers/xen/grant-table.c entry->ref, page_to_pfn(entry->page)); ref 391 drivers/xen/grant-table.c pr_info("freeing g.e. %#x\n", entry->ref); ref 396 drivers/xen/grant-table.c pr_info("g.e. %#x still pending\n", entry->ref); ref 413 drivers/xen/grant-table.c static void gnttab_add_deferred(grant_ref_t ref, bool readonly, ref 422 drivers/xen/grant-table.c entry->ref = ref; ref 436 drivers/xen/grant-table.c what, ref, page ? 
page_to_pfn(page) : -1); ref 439 drivers/xen/grant-table.c void gnttab_end_foreign_access(grant_ref_t ref, int readonly, ref 442 drivers/xen/grant-table.c if (gnttab_end_foreign_access_ref(ref, readonly)) { ref 443 drivers/xen/grant-table.c put_free_entry(ref); ref 447 drivers/xen/grant-table.c gnttab_add_deferred(ref, readonly, ref 454 drivers/xen/grant-table.c int ref; ref 456 drivers/xen/grant-table.c ref = get_free_entries(1); ref 457 drivers/xen/grant-table.c if (unlikely(ref < 0)) ref 459 drivers/xen/grant-table.c gnttab_grant_foreign_transfer_ref(ref, domid, pfn); ref 461 drivers/xen/grant-table.c return ref; ref 465 drivers/xen/grant-table.c void gnttab_grant_foreign_transfer_ref(grant_ref_t ref, domid_t domid, ref 468 drivers/xen/grant-table.c gnttab_interface->update_entry(ref, domid, pfn, GTF_accept_transfer); ref 472 drivers/xen/grant-table.c static unsigned long gnttab_end_foreign_transfer_ref_v1(grant_ref_t ref) ref 478 drivers/xen/grant-table.c pflags = &gnttab_shared.v1[ref].flags; ref 497 drivers/xen/grant-table.c frame = gnttab_shared.v1[ref].frame; ref 503 drivers/xen/grant-table.c static unsigned long gnttab_end_foreign_transfer_ref_v2(grant_ref_t ref) ref 509 drivers/xen/grant-table.c pflags = &gnttab_shared.v2[ref].hdr.flags; ref 528 drivers/xen/grant-table.c frame = gnttab_shared.v2[ref].full_page.frame; ref 534 drivers/xen/grant-table.c unsigned long gnttab_end_foreign_transfer_ref(grant_ref_t ref) ref 536 drivers/xen/grant-table.c return gnttab_interface->end_foreign_transfer_ref(ref); ref 540 drivers/xen/grant-table.c unsigned long gnttab_end_foreign_transfer(grant_ref_t ref) ref 542 drivers/xen/grant-table.c unsigned long frame = gnttab_end_foreign_transfer_ref(ref); ref 543 drivers/xen/grant-table.c put_free_entry(ref); ref 548 drivers/xen/grant-table.c void gnttab_free_grant_reference(grant_ref_t ref) ref 550 drivers/xen/grant-table.c put_free_entry(ref); ref 556 drivers/xen/grant-table.c grant_ref_t ref; ref 562 drivers/xen/grant-table.c ref = head; ref 563 drivers/xen/grant-table.c while (gnttab_entry(ref) != GNTTAB_LIST_END) { ref 564 drivers/xen/grant-table.c ref = gnttab_entry(ref); ref 567 drivers/xen/grant-table.c gnttab_entry(ref) = gnttab_free_head; ref 1051 drivers/xen/grant-table.c foreign->gref = map_ops[i].ref; ref 59 drivers/xen/pvcalls-back.c grant_ref_t ref; ref 302 drivers/xen/pvcalls-back.c grant_ref_t ref, ref 317 drivers/xen/pvcalls-back.c map->ref = ref; ref 319 drivers/xen/pvcalls-back.c ret = xenbus_map_ring_valloc(fedata->dev, &ref, 1, &page); ref 331 drivers/xen/pvcalls-back.c ret = xenbus_map_ring_valloc(fedata->dev, map->ring->ref, ref 404 drivers/xen/pvcalls-back.c req->u.connect.ref, ref 546 drivers/xen/pvcalls-back.c req->u.accept.ref, ref 33 drivers/xen/pvcalls-front.c grant_ref_t ref; ref 64 drivers/xen/pvcalls-front.c grant_ref_t ref; ref 241 drivers/xen/pvcalls-front.c gnttab_end_foreign_access(map->active.ring->ref[i], 0, 0); ref 242 drivers/xen/pvcalls-front.c gnttab_end_foreign_access(map->active.ref, 0, 0); ref 381 drivers/xen/pvcalls-front.c map->active.ring->ref[i] = gnttab_grant_foreign_access( ref 385 drivers/xen/pvcalls-front.c map->active.ref = gnttab_grant_foreign_access( ref 456 drivers/xen/pvcalls-front.c req->u.connect.ref = map->active.ref; ref 848 drivers/xen/pvcalls-front.c req->u.accept.ref = map2->active.ref; ref 1117 drivers/xen/pvcalls-front.c if (bedata->ref != -1) ref 1118 drivers/xen/pvcalls-front.c gnttab_end_foreign_access(bedata->ref, 0, 0); ref 1172 drivers/xen/pvcalls-front.c bedata->ref = -1; ref 
1202 drivers/xen/pvcalls-front.c bedata->ref = ret; ref 1203 drivers/xen/pvcalls-front.c gnttab_grant_foreign_access_ref(bedata->ref, dev->otherend_id, ref 1215 drivers/xen/pvcalls-front.c ret = xenbus_printf(xbt, dev->nodename, "ring-ref", "%d", bedata->ref); ref 596 fs/aio.c static void free_ioctx_reqs(struct percpu_ref *ref) ref 598 fs/aio.c struct kioctx *ctx = container_of(ref, struct kioctx, reqs); ref 614 fs/aio.c static void free_ioctx_users(struct percpu_ref *ref) ref 616 fs/aio.c struct kioctx *ctx = container_of(ref, struct kioctx, users); ref 282 fs/block_dev.c atomic_t ref; ref 307 fs/block_dev.c if (!dio->multi_bio || atomic_dec_and_test(&dio->ref)) { ref 431 fs/block_dev.c atomic_set(&dio->ref, 2); ref 433 fs/block_dev.c atomic_inc(&dio->ref); ref 165 fs/btrfs/backref.c static void free_pref(struct prelim_ref *ref) ref 167 fs/btrfs/backref.c kmem_cache_free(btrfs_prelim_ref_cache, ref); ref 231 fs/btrfs/backref.c struct prelim_ref *ref; ref 240 fs/btrfs/backref.c ref = rb_entry(parent, struct prelim_ref, rbnode); ref 241 fs/btrfs/backref.c result = prelim_ref_compare(ref, newref); ref 249 fs/btrfs/backref.c struct extent_inode_elem *eie = ref->inode_list; ref 255 fs/btrfs/backref.c ref->inode_list = newref->inode_list; ref 258 fs/btrfs/backref.c trace_btrfs_prelim_ref_merge(fs_info, ref, newref, ref 265 fs/btrfs/backref.c update_share_count(sc, ref->count, ref 266 fs/btrfs/backref.c ref->count + newref->count); ref 267 fs/btrfs/backref.c ref->count += newref->count; ref 286 fs/btrfs/backref.c struct prelim_ref *ref, *next_ref; ref 288 fs/btrfs/backref.c rbtree_postorder_for_each_entry_safe(ref, next_ref, ref 290 fs/btrfs/backref.c free_pref(ref); ref 340 fs/btrfs/backref.c struct prelim_ref *ref; ref 345 fs/btrfs/backref.c ref = kmem_cache_alloc(btrfs_prelim_ref_cache, gfp_mask); ref 346 fs/btrfs/backref.c if (!ref) ref 349 fs/btrfs/backref.c ref->root_id = root_id; ref 351 fs/btrfs/backref.c ref->key_for_search = *key; ref 371 fs/btrfs/backref.c if (ref->key_for_search.type == BTRFS_EXTENT_DATA_KEY && ref 372 fs/btrfs/backref.c ref->key_for_search.offset >= LLONG_MAX) ref 373 fs/btrfs/backref.c ref->key_for_search.offset = 0; ref 375 fs/btrfs/backref.c memset(&ref->key_for_search, 0, sizeof(ref->key_for_search)); ref 378 fs/btrfs/backref.c ref->inode_list = NULL; ref 379 fs/btrfs/backref.c ref->level = level; ref 380 fs/btrfs/backref.c ref->count = count; ref 381 fs/btrfs/backref.c ref->parent = parent; ref 382 fs/btrfs/backref.c ref->wanted_disk_byte = wanted_disk_byte; ref 383 fs/btrfs/backref.c prelim_ref_insert(fs_info, preftree, ref, sc); ref 413 fs/btrfs/backref.c struct ulist *parents, struct prelim_ref *ref, ref 421 fs/btrfs/backref.c struct btrfs_key *key_for_search = &ref->key_for_search; ref 425 fs/btrfs/backref.c u64 wanted_disk_byte = ref->wanted_disk_byte; ref 505 fs/btrfs/backref.c struct prelim_ref *ref, struct ulist *parents, ref 514 fs/btrfs/backref.c int level = ref->level; ref 517 fs/btrfs/backref.c root_key.objectid = ref->root_id; ref 550 fs/btrfs/backref.c ret = btrfs_search_slot(NULL, root, &ref->key_for_search, path, ref 553 fs/btrfs/backref.c ret = btrfs_search_old_slot(root, &ref->key_for_search, path, ref 561 fs/btrfs/backref.c ref->root_id, level, ref->count, ret, ref 562 fs/btrfs/backref.c ref->key_for_search.objectid, ref->key_for_search.type, ref 563 fs/btrfs/backref.c ref->key_for_search.offset); ref 577 fs/btrfs/backref.c ret = add_all_parents(root, path, parents, ref, level, time_seq, ref 633 fs/btrfs/backref.c struct prelim_ref *ref; ref 
635 fs/btrfs/backref.c ref = rb_entry(rnode, struct prelim_ref, rbnode); ref 636 fs/btrfs/backref.c if (WARN(ref->parent, ref 642 fs/btrfs/backref.c rb_erase_cached(&ref->rbnode, &preftrees->indirect.root); ref 645 fs/btrfs/backref.c if (ref->count == 0) { ref 646 fs/btrfs/backref.c free_pref(ref); ref 651 fs/btrfs/backref.c ref->root_id != sc->root_objectid) { ref 652 fs/btrfs/backref.c free_pref(ref); ref 656 fs/btrfs/backref.c err = resolve_indirect_ref(fs_info, path, time_seq, ref, ref 664 fs/btrfs/backref.c prelim_ref_insert(fs_info, &preftrees->direct, ref, ref 668 fs/btrfs/backref.c free_pref(ref); ref 676 fs/btrfs/backref.c ref->parent = node ? node->val : 0; ref 677 fs/btrfs/backref.c ref->inode_list = unode_aux_to_inode_list(node); ref 686 fs/btrfs/backref.c free_pref(ref); ref 690 fs/btrfs/backref.c memcpy(new_ref, ref, sizeof(*ref)); ref 701 fs/btrfs/backref.c prelim_ref_insert(fs_info, &preftrees->direct, ref, NULL); ref 717 fs/btrfs/backref.c struct prelim_ref *ref; ref 723 fs/btrfs/backref.c ref = rb_entry(node, struct prelim_ref, rbnode); ref 726 fs/btrfs/backref.c BUG_ON(ref->parent); /* should not be a direct ref */ ref 727 fs/btrfs/backref.c BUG_ON(ref->key_for_search.type); ref 728 fs/btrfs/backref.c BUG_ON(!ref->wanted_disk_byte); ref 730 fs/btrfs/backref.c eb = read_tree_block(fs_info, ref->wanted_disk_byte, 0, ref 731 fs/btrfs/backref.c ref->level - 1, NULL); ref 733 fs/btrfs/backref.c free_pref(ref); ref 736 fs/btrfs/backref.c free_pref(ref); ref 743 fs/btrfs/backref.c btrfs_item_key_to_cpu(eb, &ref->key_for_search, 0); ref 745 fs/btrfs/backref.c btrfs_node_key_to_cpu(eb, &ref->key_for_search, 0); ref 749 fs/btrfs/backref.c prelim_ref_insert(fs_info, &preftrees->indirect, ref, NULL); ref 800 fs/btrfs/backref.c struct btrfs_delayed_tree_ref *ref; ref 802 fs/btrfs/backref.c ref = btrfs_delayed_node_to_tree_ref(node); ref 803 fs/btrfs/backref.c ret = add_indirect_ref(fs_info, preftrees, ref->root, ref 804 fs/btrfs/backref.c &tmp_op_key, ref->level + 1, ref 811 fs/btrfs/backref.c struct btrfs_delayed_tree_ref *ref; ref 813 fs/btrfs/backref.c ref = btrfs_delayed_node_to_tree_ref(node); ref 815 fs/btrfs/backref.c ret = add_direct_ref(fs_info, preftrees, ref->level + 1, ref 816 fs/btrfs/backref.c ref->parent, node->bytenr, count, ref 822 fs/btrfs/backref.c struct btrfs_delayed_data_ref *ref; ref 823 fs/btrfs/backref.c ref = btrfs_delayed_node_to_data_ref(node); ref 825 fs/btrfs/backref.c key.objectid = ref->objectid; ref 827 fs/btrfs/backref.c key.offset = ref->offset; ref 833 fs/btrfs/backref.c if (sc && sc->inum && ref->objectid != sc->inum) { ref 838 fs/btrfs/backref.c ret = add_indirect_ref(fs_info, preftrees, ref->root, ref 845 fs/btrfs/backref.c struct btrfs_delayed_data_ref *ref; ref 847 fs/btrfs/backref.c ref = btrfs_delayed_node_to_data_ref(node); ref 849 fs/btrfs/backref.c ret = add_direct_ref(fs_info, preftrees, 0, ref->parent, ref 1125 fs/btrfs/backref.c struct prelim_ref *ref; ref 1254 fs/btrfs/backref.c ref = rb_entry(node, struct prelim_ref, rbnode); ref 1255 fs/btrfs/backref.c node = rb_next(&ref->rbnode); ref 1266 fs/btrfs/backref.c if (roots && ref->count && ref->root_id && ref->parent == 0) { ref 1268 fs/btrfs/backref.c ref->root_id != sc->root_objectid) { ref 1274 fs/btrfs/backref.c ret = ulist_add(roots, ref->root_id, 0, GFP_NOFS); ref 1278 fs/btrfs/backref.c if (ref->count && ref->parent) { ref 1279 fs/btrfs/backref.c if (extent_item_pos && !ref->inode_list && ref 1280 fs/btrfs/backref.c ref->level == 0) { ref 1283 fs/btrfs/backref.c eb = 
read_tree_block(fs_info, ref->parent, 0, ref 1284 fs/btrfs/backref.c ref->level, NULL); ref 1305 fs/btrfs/backref.c ref->inode_list = eie; ref 1307 fs/btrfs/backref.c ret = ulist_add_merge_ptr(refs, ref->parent, ref 1308 fs/btrfs/backref.c ref->inode_list, ref 1320 fs/btrfs/backref.c eie->next = ref->inode_list; ref 2442 fs/btrfs/ctree.h int btrfs_free_extent(struct btrfs_trans_handle *trans, struct btrfs_ref *ref); ref 418 fs/btrfs/delayed-ref.c struct btrfs_delayed_ref_node *ref) ref 421 fs/btrfs/delayed-ref.c rb_erase_cached(&ref->ref_node, &head->ref_tree); ref 422 fs/btrfs/delayed-ref.c RB_CLEAR_NODE(&ref->ref_node); ref 423 fs/btrfs/delayed-ref.c if (!list_empty(&ref->add_list)) ref 424 fs/btrfs/delayed-ref.c list_del(&ref->add_list); ref 425 fs/btrfs/delayed-ref.c ref->in_tree = 0; ref 426 fs/btrfs/delayed-ref.c btrfs_put_delayed_ref(ref); ref 433 fs/btrfs/delayed-ref.c struct btrfs_delayed_ref_node *ref, ref 437 fs/btrfs/delayed-ref.c struct rb_node *node = rb_next(&ref->ref_node); ref 447 fs/btrfs/delayed-ref.c if (comp_refs(ref, next, false)) ref 450 fs/btrfs/delayed-ref.c if (ref->action == next->action) { ref 453 fs/btrfs/delayed-ref.c if (ref->ref_mod < next->ref_mod) { ref 454 fs/btrfs/delayed-ref.c swap(ref, next); ref 461 fs/btrfs/delayed-ref.c ref->ref_mod += mod; ref 462 fs/btrfs/delayed-ref.c if (ref->ref_mod == 0) { ref 463 fs/btrfs/delayed-ref.c drop_delayed_ref(trans, delayed_refs, head, ref); ref 469 fs/btrfs/delayed-ref.c WARN_ON(ref->type == BTRFS_TREE_BLOCK_REF_KEY || ref 470 fs/btrfs/delayed-ref.c ref->type == BTRFS_SHARED_BLOCK_REF_KEY); ref 482 fs/btrfs/delayed-ref.c struct btrfs_delayed_ref_node *ref; ref 508 fs/btrfs/delayed-ref.c ref = rb_entry(node, struct btrfs_delayed_ref_node, ref_node); ref 509 fs/btrfs/delayed-ref.c if (seq && ref->seq >= seq) ref 511 fs/btrfs/delayed-ref.c if (merge_ref(trans, delayed_refs, head, ref, seq)) ref 598 fs/btrfs/delayed-ref.c struct btrfs_delayed_ref_node *ref) ref 605 fs/btrfs/delayed-ref.c exist = tree_insert(&href->ref_tree, ref); ref 611 fs/btrfs/delayed-ref.c if (exist->action == ref->action) { ref 612 fs/btrfs/delayed-ref.c mod = ref->ref_mod; ref 615 fs/btrfs/delayed-ref.c if (exist->ref_mod < ref->ref_mod) { ref 616 fs/btrfs/delayed-ref.c exist->action = ref->action; ref 618 fs/btrfs/delayed-ref.c exist->ref_mod = ref->ref_mod; ref 619 fs/btrfs/delayed-ref.c if (ref->action == BTRFS_ADD_DELAYED_REF) ref 622 fs/btrfs/delayed-ref.c else if (ref->action == BTRFS_DROP_DELAYED_REF) { ref 629 fs/btrfs/delayed-ref.c mod = -ref->ref_mod; ref 639 fs/btrfs/delayed-ref.c if (ref->action == BTRFS_ADD_DELAYED_REF) ref 640 fs/btrfs/delayed-ref.c list_add_tail(&ref->add_list, &href->ref_add_list); ref 881 fs/btrfs/delayed-ref.c struct btrfs_delayed_ref_node *ref, ref 893 fs/btrfs/delayed-ref.c refcount_set(&ref->refs, 1); ref 894 fs/btrfs/delayed-ref.c ref->bytenr = bytenr; ref 895 fs/btrfs/delayed-ref.c ref->num_bytes = num_bytes; ref 896 fs/btrfs/delayed-ref.c ref->ref_mod = 1; ref 897 fs/btrfs/delayed-ref.c ref->action = action; ref 898 fs/btrfs/delayed-ref.c ref->is_head = 0; ref 899 fs/btrfs/delayed-ref.c ref->in_tree = 1; ref 900 fs/btrfs/delayed-ref.c ref->seq = seq; ref 901 fs/btrfs/delayed-ref.c ref->type = ref_type; ref 902 fs/btrfs/delayed-ref.c RB_CLEAR_NODE(&ref->ref_node); ref 903 fs/btrfs/delayed-ref.c INIT_LIST_HEAD(&ref->add_list); ref 917 fs/btrfs/delayed-ref.c struct btrfs_delayed_tree_ref *ref; ref 935 fs/btrfs/delayed-ref.c ref = kmem_cache_alloc(btrfs_delayed_tree_ref_cachep, GFP_NOFS); ref 936 
fs/btrfs/delayed-ref.c if (!ref) ref 941 fs/btrfs/delayed-ref.c kmem_cache_free(btrfs_delayed_tree_ref_cachep, ref); ref 951 fs/btrfs/delayed-ref.c kmem_cache_free(btrfs_delayed_tree_ref_cachep, ref); ref 962 fs/btrfs/delayed-ref.c init_delayed_ref_common(fs_info, &ref->node, bytenr, num_bytes, ref 964 fs/btrfs/delayed-ref.c ref->root = generic_ref->tree_ref.root; ref 965 fs/btrfs/delayed-ref.c ref->parent = parent; ref 966 fs/btrfs/delayed-ref.c ref->level = level; ref 984 fs/btrfs/delayed-ref.c ret = insert_delayed_ref(trans, delayed_refs, head_ref, &ref->node); ref 993 fs/btrfs/delayed-ref.c trace_add_delayed_tree_ref(fs_info, &ref->node, ref, ref 997 fs/btrfs/delayed-ref.c kmem_cache_free(btrfs_delayed_tree_ref_cachep, ref); ref 1014 fs/btrfs/delayed-ref.c struct btrfs_delayed_data_ref *ref; ref 1030 fs/btrfs/delayed-ref.c ref = kmem_cache_alloc(btrfs_delayed_data_ref_cachep, GFP_NOFS); ref 1031 fs/btrfs/delayed-ref.c if (!ref) ref 1038 fs/btrfs/delayed-ref.c init_delayed_ref_common(fs_info, &ref->node, bytenr, num_bytes, ref 1040 fs/btrfs/delayed-ref.c ref->root = ref_root; ref 1041 fs/btrfs/delayed-ref.c ref->parent = parent; ref 1042 fs/btrfs/delayed-ref.c ref->objectid = owner; ref 1043 fs/btrfs/delayed-ref.c ref->offset = offset; ref 1048 fs/btrfs/delayed-ref.c kmem_cache_free(btrfs_delayed_data_ref_cachep, ref); ref 1058 fs/btrfs/delayed-ref.c kmem_cache_free(btrfs_delayed_data_ref_cachep, ref); ref 1080 fs/btrfs/delayed-ref.c ret = insert_delayed_ref(trans, delayed_refs, head_ref, &ref->node); ref 1089 fs/btrfs/delayed-ref.c trace_add_delayed_data_ref(trans->fs_info, &ref->node, ref, ref 1093 fs/btrfs/delayed-ref.c kmem_cache_free(btrfs_delayed_data_ref_cachep, ref); ref 309 fs/btrfs/delayed-ref.h static inline void btrfs_put_delayed_ref(struct btrfs_delayed_ref_node *ref) ref 311 fs/btrfs/delayed-ref.h WARN_ON(refcount_read(&ref->refs) == 0); ref 312 fs/btrfs/delayed-ref.h if (refcount_dec_and_test(&ref->refs)) { ref 313 fs/btrfs/delayed-ref.h WARN_ON(ref->in_tree); ref 314 fs/btrfs/delayed-ref.h switch (ref->type) { ref 317 fs/btrfs/delayed-ref.h kmem_cache_free(btrfs_delayed_tree_ref_cachep, ref); ref 321 fs/btrfs/delayed-ref.h kmem_cache_free(btrfs_delayed_data_ref_cachep, ref); ref 4270 fs/btrfs/disk-io.c struct btrfs_delayed_ref_node *ref; ref 4294 fs/btrfs/disk-io.c ref = rb_entry(n, struct btrfs_delayed_ref_node, ref 4296 fs/btrfs/disk-io.c ref->in_tree = 0; ref 4297 fs/btrfs/disk-io.c rb_erase_cached(&ref->ref_node, &head->ref_tree); ref 4298 fs/btrfs/disk-io.c RB_CLEAR_NODE(&ref->ref_node); ref 4299 fs/btrfs/disk-io.c if (!list_empty(&ref->add_list)) ref 4300 fs/btrfs/disk-io.c list_del(&ref->add_list); ref 4302 fs/btrfs/disk-io.c btrfs_put_delayed_ref(ref); ref 162 fs/btrfs/export.c struct btrfs_root_ref *ref; ref 202 fs/btrfs/export.c ref = btrfs_item_ptr(leaf, path->slots[0], ref 204 fs/btrfs/export.c key.objectid = btrfs_root_ref_dirid(leaf, ref); ref 87 fs/btrfs/extent-tree.c static u64 generic_ref_to_space_flags(struct btrfs_ref *ref) ref 89 fs/btrfs/extent-tree.c if (ref->type == BTRFS_REF_METADATA) { ref 90 fs/btrfs/extent-tree.c if (ref->tree_ref.root == BTRFS_CHUNK_TREE_OBJECTID) ref 99 fs/btrfs/extent-tree.c struct btrfs_ref *ref) ref 102 fs/btrfs/extent-tree.c u64 flags = generic_ref_to_space_flags(ref); ref 106 fs/btrfs/extent-tree.c percpu_counter_add_batch(&space_info->total_bytes_pinned, ref->len, ref 111 fs/btrfs/extent-tree.c struct btrfs_ref *ref) ref 114 fs/btrfs/extent-tree.c u64 flags = generic_ref_to_space_flags(ref); ref 118 
fs/btrfs/extent-tree.c percpu_counter_add_batch(&space_info->total_bytes_pinned, -ref->len, ref 458 fs/btrfs/extent-tree.c struct btrfs_extent_data_ref *ref) ref 460 fs/btrfs/extent-tree.c return hash_extent_data_ref(btrfs_extent_data_ref_root(leaf, ref), ref 461 fs/btrfs/extent-tree.c btrfs_extent_data_ref_objectid(leaf, ref), ref 462 fs/btrfs/extent-tree.c btrfs_extent_data_ref_offset(leaf, ref)); ref 466 fs/btrfs/extent-tree.c struct btrfs_extent_data_ref *ref, ref 469 fs/btrfs/extent-tree.c if (btrfs_extent_data_ref_root(leaf, ref) != root_objectid || ref 470 fs/btrfs/extent-tree.c btrfs_extent_data_ref_objectid(leaf, ref) != owner || ref 471 fs/btrfs/extent-tree.c btrfs_extent_data_ref_offset(leaf, ref) != offset) ref 484 fs/btrfs/extent-tree.c struct btrfs_extent_data_ref *ref; ref 534 fs/btrfs/extent-tree.c ref = btrfs_item_ptr(leaf, path->slots[0], ref 537 fs/btrfs/extent-tree.c if (match_extent_data_ref(leaf, ref, root_objectid, ref 583 fs/btrfs/extent-tree.c struct btrfs_shared_data_ref *ref; ref 584 fs/btrfs/extent-tree.c ref = btrfs_item_ptr(leaf, path->slots[0], ref 587 fs/btrfs/extent-tree.c btrfs_set_shared_data_ref_count(leaf, ref, refs_to_add); ref 589 fs/btrfs/extent-tree.c num_refs = btrfs_shared_data_ref_count(leaf, ref); ref 591 fs/btrfs/extent-tree.c btrfs_set_shared_data_ref_count(leaf, ref, num_refs); ref 594 fs/btrfs/extent-tree.c struct btrfs_extent_data_ref *ref; ref 596 fs/btrfs/extent-tree.c ref = btrfs_item_ptr(leaf, path->slots[0], ref 598 fs/btrfs/extent-tree.c if (match_extent_data_ref(leaf, ref, root_objectid, ref 610 fs/btrfs/extent-tree.c ref = btrfs_item_ptr(leaf, path->slots[0], ref 613 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_root(leaf, ref, ref 615 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_objectid(leaf, ref, owner); ref 616 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_offset(leaf, ref, offset); ref 617 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_count(leaf, ref, refs_to_add); ref 619 fs/btrfs/extent-tree.c num_refs = btrfs_extent_data_ref_count(leaf, ref); ref 621 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_count(leaf, ref, num_refs); ref 1493 fs/btrfs/extent-tree.c struct btrfs_delayed_data_ref *ref; ref 1503 fs/btrfs/extent-tree.c ref = btrfs_delayed_node_to_data_ref(node); ref 1504 fs/btrfs/extent-tree.c trace_run_delayed_data_ref(trans->fs_info, node, ref, node->action); ref 1507 fs/btrfs/extent-tree.c parent = ref->parent; ref 1508 fs/btrfs/extent-tree.c ref_root = ref->root; ref 1514 fs/btrfs/extent-tree.c flags, ref->objectid, ref 1515 fs/btrfs/extent-tree.c ref->offset, &ins, ref 1519 fs/btrfs/extent-tree.c ref->objectid, ref->offset, ref 1523 fs/btrfs/extent-tree.c ref_root, ref->objectid, ref 1524 fs/btrfs/extent-tree.c ref->offset, node->ref_mod, ref 1643 fs/btrfs/extent-tree.c struct btrfs_delayed_tree_ref *ref; ref 1647 fs/btrfs/extent-tree.c ref = btrfs_delayed_node_to_tree_ref(node); ref 1648 fs/btrfs/extent-tree.c trace_run_delayed_tree_ref(trans->fs_info, node, ref, node->action); ref 1651 fs/btrfs/extent-tree.c parent = ref->parent; ref 1652 fs/btrfs/extent-tree.c ref_root = ref->root; ref 1666 fs/btrfs/extent-tree.c ref->level, 0, 1, extent_op); ref 1669 fs/btrfs/extent-tree.c ref->level, 0, 1, extent_op); ref 1710 fs/btrfs/extent-tree.c struct btrfs_delayed_ref_node *ref; ref 1725 fs/btrfs/extent-tree.c ref = rb_entry(rb_first_cached(&head->ref_tree), ref 1727 fs/btrfs/extent-tree.c ASSERT(list_empty(&ref->add_list)); ref 1728 fs/btrfs/extent-tree.c return ref; ref 1904 fs/btrfs/extent-tree.c struct 
btrfs_delayed_ref_node *ref; ref 1913 fs/btrfs/extent-tree.c while ((ref = select_delayed_ref(locked_ref))) { ref 1914 fs/btrfs/extent-tree.c if (ref->seq && ref 1915 fs/btrfs/extent-tree.c btrfs_check_delayed_seq(fs_info, ref->seq)) { ref 1922 fs/btrfs/extent-tree.c ref->in_tree = 0; ref 1923 fs/btrfs/extent-tree.c rb_erase_cached(&ref->ref_node, &locked_ref->ref_tree); ref 1924 fs/btrfs/extent-tree.c RB_CLEAR_NODE(&ref->ref_node); ref 1925 fs/btrfs/extent-tree.c if (!list_empty(&ref->add_list)) ref 1926 fs/btrfs/extent-tree.c list_del(&ref->add_list); ref 1931 fs/btrfs/extent-tree.c switch (ref->action) { ref 1934 fs/btrfs/extent-tree.c locked_ref->ref_mod -= ref->ref_mod; ref 1937 fs/btrfs/extent-tree.c locked_ref->ref_mod += ref->ref_mod; ref 1955 fs/btrfs/extent-tree.c ret = run_one_delayed_ref(trans, ref, extent_op, ref 1961 fs/btrfs/extent-tree.c btrfs_put_delayed_ref(ref); ref 1967 fs/btrfs/extent-tree.c btrfs_put_delayed_ref(ref); ref 2246 fs/btrfs/extent-tree.c struct btrfs_delayed_ref_node *ref; ref 2295 fs/btrfs/extent-tree.c ref = rb_entry(node, struct btrfs_delayed_ref_node, ref_node); ref 2297 fs/btrfs/extent-tree.c if (ref->type != BTRFS_EXTENT_DATA_REF_KEY) { ref 2302 fs/btrfs/extent-tree.c data_ref = btrfs_delayed_node_to_data_ref(ref); ref 2328 fs/btrfs/extent-tree.c struct btrfs_extent_data_ref *ref; ref 2377 fs/btrfs/extent-tree.c ref = (struct btrfs_extent_data_ref *)(&iref->offset); ref 2379 fs/btrfs/extent-tree.c btrfs_extent_data_ref_count(leaf, ref) || ref 2380 fs/btrfs/extent-tree.c btrfs_extent_data_ref_root(leaf, ref) != ref 2382 fs/btrfs/extent-tree.c btrfs_extent_data_ref_objectid(leaf, ref) != objectid || ref 2383 fs/btrfs/extent-tree.c btrfs_extent_data_ref_offset(leaf, ref) != offset) ref 3304 fs/btrfs/extent-tree.c int btrfs_free_extent(struct btrfs_trans_handle *trans, struct btrfs_ref *ref) ref 3317 fs/btrfs/extent-tree.c if ((ref->type == BTRFS_REF_METADATA && ref 3318 fs/btrfs/extent-tree.c ref->tree_ref.root == BTRFS_TREE_LOG_OBJECTID) || ref 3319 fs/btrfs/extent-tree.c (ref->type == BTRFS_REF_DATA && ref 3320 fs/btrfs/extent-tree.c ref->data_ref.ref_root == BTRFS_TREE_LOG_OBJECTID)) { ref 3322 fs/btrfs/extent-tree.c btrfs_pin_extent(fs_info, ref->bytenr, ref->len, 1); ref 3325 fs/btrfs/extent-tree.c } else if (ref->type == BTRFS_REF_METADATA) { ref 3326 fs/btrfs/extent-tree.c ret = btrfs_add_delayed_tree_ref(trans, ref, NULL, ref 3329 fs/btrfs/extent-tree.c ret = btrfs_add_delayed_data_ref(trans, ref, 0, ref 3333 fs/btrfs/extent-tree.c if (!((ref->type == BTRFS_REF_METADATA && ref 3334 fs/btrfs/extent-tree.c ref->tree_ref.root == BTRFS_TREE_LOG_OBJECTID) || ref 3335 fs/btrfs/extent-tree.c (ref->type == BTRFS_REF_DATA && ref 3336 fs/btrfs/extent-tree.c ref->data_ref.ref_root == BTRFS_TREE_LOG_OBJECTID))) ref 3337 fs/btrfs/extent-tree.c btrfs_ref_tree_mod(fs_info, ref); ref 3340 fs/btrfs/extent-tree.c add_pinned_bytes(fs_info, ref); ref 4233 fs/btrfs/extent-tree.c struct btrfs_shared_data_ref *ref; ref 4234 fs/btrfs/extent-tree.c ref = (struct btrfs_shared_data_ref *)(iref + 1); ref 4236 fs/btrfs/extent-tree.c btrfs_set_shared_data_ref_count(leaf, ref, ref_mod); ref 4238 fs/btrfs/extent-tree.c struct btrfs_extent_data_ref *ref; ref 4239 fs/btrfs/extent-tree.c ref = (struct btrfs_extent_data_ref *)(&iref->offset); ref 4240 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_root(leaf, ref, root_objectid); ref 4241 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_objectid(leaf, ref, owner); ref 4242 fs/btrfs/extent-tree.c 
btrfs_set_extent_data_ref_offset(leaf, ref, offset); ref 4243 fs/btrfs/extent-tree.c btrfs_set_extent_data_ref_count(leaf, ref, ref_mod); ref 4275 fs/btrfs/extent-tree.c struct btrfs_delayed_tree_ref *ref; ref 4281 fs/btrfs/extent-tree.c ref = btrfs_delayed_node_to_tree_ref(node); ref 4285 fs/btrfs/extent-tree.c extent_key.offset = ref->level; ref 4320 fs/btrfs/extent-tree.c btrfs_set_tree_block_level(leaf, block_info, ref->level); ref 4328 fs/btrfs/extent-tree.c btrfs_set_extent_inline_ref_offset(leaf, iref, ref->parent); ref 4332 fs/btrfs/extent-tree.c btrfs_set_extent_inline_ref_offset(leaf, iref, ref->root); ref 4803 fs/btrfs/extent-tree.c struct btrfs_ref ref = { 0 }; ref 4976 fs/btrfs/extent-tree.c btrfs_init_generic_ref(&ref, BTRFS_DROP_DELAYED_REF, bytenr, ref 4978 fs/btrfs/extent-tree.c btrfs_init_tree_ref(&ref, level - 1, root->root_key.objectid); ref 4979 fs/btrfs/extent-tree.c ret = btrfs_free_extent(trans, &ref); ref 758 fs/btrfs/file.c struct btrfs_ref ref = { 0 }; ref 914 fs/btrfs/file.c btrfs_init_generic_ref(&ref, ref 917 fs/btrfs/file.c btrfs_init_data_ref(&ref, ref 921 fs/btrfs/file.c ret = btrfs_inc_extent_ref(trans, &ref); ref 1001 fs/btrfs/file.c btrfs_init_generic_ref(&ref, ref 1004 fs/btrfs/file.c btrfs_init_data_ref(&ref, ref 1008 fs/btrfs/file.c ret = btrfs_free_extent(trans, &ref); ref 1153 fs/btrfs/file.c struct btrfs_ref ref = { 0 }; ref 1299 fs/btrfs/file.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, bytenr, ref 1301 fs/btrfs/file.c btrfs_init_data_ref(&ref, root->root_key.objectid, ino, ref 1303 fs/btrfs/file.c ret = btrfs_inc_extent_ref(trans, &ref); ref 1325 fs/btrfs/file.c btrfs_init_generic_ref(&ref, BTRFS_DROP_DELAYED_REF, bytenr, ref 1327 fs/btrfs/file.c btrfs_init_data_ref(&ref, root->root_key.objectid, ino, orig_offset); ref 1338 fs/btrfs/file.c ret = btrfs_free_extent(trans, &ref); ref 1356 fs/btrfs/file.c ret = btrfs_free_extent(trans, &ref); ref 2475 fs/btrfs/file.c struct btrfs_ref ref = { 0 }; ref 2509 fs/btrfs/file.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, ref 2513 fs/btrfs/file.c btrfs_init_data_ref(&ref, root->root_key.objectid, ref 2515 fs/btrfs/file.c ret = btrfs_inc_extent_ref(trans, &ref); ref 15 fs/btrfs/inode-item.c struct btrfs_inode_ref *ref; ref 25 fs/btrfs/inode-item.c ref = (struct btrfs_inode_ref *)(ptr + cur_offset); ref 26 fs/btrfs/inode-item.c len = btrfs_inode_ref_name_len(leaf, ref); ref 27 fs/btrfs/inode-item.c name_ptr = (unsigned long)(ref + 1); ref 28 fs/btrfs/inode-item.c cur_offset += len + sizeof(*ref); ref 32 fs/btrfs/inode-item.c return ref; ref 178 fs/btrfs/inode-item.c struct btrfs_inode_ref *ref; ref 186 fs/btrfs/inode-item.c int del_len = name_len + sizeof(*ref); ref 207 fs/btrfs/inode-item.c ref = btrfs_find_name_in_backref(path->nodes[0], path->slots[0], name, ref 209 fs/btrfs/inode-item.c if (!ref) { ref 218 fs/btrfs/inode-item.c *index = btrfs_inode_ref_index(leaf, ref); ref 224 fs/btrfs/inode-item.c ptr = (unsigned long)ref; ref 225 fs/btrfs/inode-item.c sub_item_len = name_len + sizeof(*ref); ref 317 fs/btrfs/inode-item.c struct btrfs_inode_ref *ref; ref 320 fs/btrfs/inode-item.c int ins_len = name_len + sizeof(*ref); ref 336 fs/btrfs/inode-item.c ref = btrfs_find_name_in_backref(path->nodes[0], path->slots[0], ref 338 fs/btrfs/inode-item.c if (ref) ref 343 fs/btrfs/inode-item.c ref = btrfs_item_ptr(path->nodes[0], path->slots[0], ref 345 fs/btrfs/inode-item.c ref = (struct btrfs_inode_ref *)((unsigned long)ref + old_size); ref 346 fs/btrfs/inode-item.c 
btrfs_set_inode_ref_name_len(path->nodes[0], ref, name_len); ref 347 fs/btrfs/inode-item.c btrfs_set_inode_ref_index(path->nodes[0], ref, index); ref 348 fs/btrfs/inode-item.c ptr = (unsigned long)(ref + 1); ref 361 fs/btrfs/inode-item.c ref = btrfs_item_ptr(path->nodes[0], path->slots[0], ref 363 fs/btrfs/inode-item.c btrfs_set_inode_ref_name_len(path->nodes[0], ref, name_len); ref 364 fs/btrfs/inode-item.c btrfs_set_inode_ref_index(path->nodes[0], ref, index); ref 365 fs/btrfs/inode-item.c ptr = (unsigned long)(ref + 1); ref 2720 fs/btrfs/inode.c struct btrfs_ref ref = { 0 }; ref 2891 fs/btrfs/inode.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, new->bytenr, ref 2893 fs/btrfs/inode.c btrfs_init_data_ref(&ref, backref->root_id, backref->inum, ref 2895 fs/btrfs/inode.c ret = btrfs_inc_extent_ref(trans, &ref); ref 3921 fs/btrfs/inode.c struct btrfs_inode_ref *ref; ref 3923 fs/btrfs/inode.c ref = (struct btrfs_inode_ref *)ptr; ref 3924 fs/btrfs/inode.c BTRFS_I(inode)->dir_index = btrfs_inode_ref_index(leaf, ref); ref 4944 fs/btrfs/inode.c struct btrfs_ref ref = { 0 }; ref 4948 fs/btrfs/inode.c btrfs_init_generic_ref(&ref, BTRFS_DROP_DELAYED_REF, ref 4950 fs/btrfs/inode.c ref.real_root = root->root_key.objectid; ref 4951 fs/btrfs/inode.c btrfs_init_data_ref(&ref, btrfs_header_owner(leaf), ref 4953 fs/btrfs/inode.c ret = btrfs_free_extent(trans, &ref); ref 5713 fs/btrfs/inode.c struct btrfs_root_ref *ref; ref 5738 fs/btrfs/inode.c ref = btrfs_item_ptr(leaf, path->slots[0], struct btrfs_root_ref); ref 5739 fs/btrfs/inode.c if (btrfs_root_ref_dirid(leaf, ref) != btrfs_ino(BTRFS_I(dir)) || ref 5740 fs/btrfs/inode.c btrfs_root_ref_name_len(leaf, ref) != dentry->d_name.len) ref 5744 fs/btrfs/inode.c (unsigned long)(ref + 1), ref 6423 fs/btrfs/inode.c struct btrfs_inode_ref *ref; ref 6504 fs/btrfs/inode.c sizes[1] = name_len + sizeof(*ref); ref 6538 fs/btrfs/inode.c ref = btrfs_item_ptr(path->nodes[0], path->slots[0] + 1, ref 6540 fs/btrfs/inode.c btrfs_set_inode_ref_name_len(path->nodes[0], ref, name_len); ref 6541 fs/btrfs/inode.c btrfs_set_inode_ref_index(path->nodes[0], ref, *index); ref 6542 fs/btrfs/inode.c ptr = (unsigned long)(ref + 1); ref 32 fs/btrfs/print-tree.c struct btrfs_extent_data_ref *ref) ref 35 fs/btrfs/print-tree.c btrfs_extent_data_ref_root(eb, ref), ref 36 fs/btrfs/print-tree.c btrfs_extent_data_ref_objectid(eb, ref), ref 37 fs/btrfs/print-tree.c btrfs_extent_data_ref_offset(eb, ref), ref 38 fs/btrfs/print-tree.c btrfs_extent_data_ref_count(eb, ref)); ref 51 fs/btrfs/ref-verify.c struct ref_entry ref; ref 162 fs/btrfs/ref-verify.c struct ref_entry *ref) ref 172 fs/btrfs/ref-verify.c cmp = comp_refs(entry, ref); ref 181 fs/btrfs/ref-verify.c rb_link_node(&ref->node, parent_node, p); ref 182 fs/btrfs/ref-verify.c rb_insert_color(&ref->node, root); ref 235 fs/btrfs/ref-verify.c struct ref_entry *ref; ref 246 fs/btrfs/ref-verify.c ref = rb_entry(n, struct ref_entry, node); ref 247 fs/btrfs/ref-verify.c rb_erase(&ref->node, &be->refs); ref 248 fs/btrfs/ref-verify.c kfree(ref); ref 312 fs/btrfs/ref-verify.c struct ref_entry *ref = NULL, *exist; ref 314 fs/btrfs/ref-verify.c ref = kmalloc(sizeof(struct ref_entry), GFP_KERNEL); ref 315 fs/btrfs/ref-verify.c if (!ref) ref 319 fs/btrfs/ref-verify.c ref->root_objectid = 0; ref 321 fs/btrfs/ref-verify.c ref->root_objectid = ref_root; ref 322 fs/btrfs/ref-verify.c ref->parent = parent; ref 323 fs/btrfs/ref-verify.c ref->owner = level; ref 324 fs/btrfs/ref-verify.c ref->offset = 0; ref 325 fs/btrfs/ref-verify.c ref->num_refs = 1; ref 
329 fs/btrfs/ref-verify.c kfree(ref); ref 342 fs/btrfs/ref-verify.c exist = insert_ref_entry(&be->refs, ref); ref 345 fs/btrfs/ref-verify.c kfree(ref); ref 357 fs/btrfs/ref-verify.c struct ref_entry *ref; ref 359 fs/btrfs/ref-verify.c ref = kzalloc(sizeof(struct ref_entry), GFP_KERNEL); ref 360 fs/btrfs/ref-verify.c if (!ref) ref 364 fs/btrfs/ref-verify.c kfree(ref); ref 369 fs/btrfs/ref-verify.c ref->parent = parent; ref 370 fs/btrfs/ref-verify.c ref->num_refs = num_refs; ref 371 fs/btrfs/ref-verify.c if (insert_ref_entry(&be->refs, ref)) { ref 374 fs/btrfs/ref-verify.c kfree(ref); ref 387 fs/btrfs/ref-verify.c struct ref_entry *ref; ref 394 fs/btrfs/ref-verify.c ref = kzalloc(sizeof(struct ref_entry), GFP_KERNEL); ref 395 fs/btrfs/ref-verify.c if (!ref) ref 399 fs/btrfs/ref-verify.c kfree(ref); ref 404 fs/btrfs/ref-verify.c ref->parent = 0; ref 405 fs/btrfs/ref-verify.c ref->owner = owner; ref 406 fs/btrfs/ref-verify.c ref->root_objectid = ref_root; ref 407 fs/btrfs/ref-verify.c ref->offset = offset; ref 408 fs/btrfs/ref-verify.c ref->num_refs = num_refs; ref 409 fs/btrfs/ref-verify.c if (insert_ref_entry(&be->refs, ref)) { ref 412 fs/btrfs/ref-verify.c kfree(ref); ref 621 fs/btrfs/ref-verify.c ra->action, ra->root, ra->ref.root_objectid, ra->ref.parent, ref 622 fs/btrfs/ref-verify.c ra->ref.owner, ra->ref.offset, ra->ref.num_refs); ref 633 fs/btrfs/ref-verify.c struct ref_entry *ref; ref 644 fs/btrfs/ref-verify.c ref = rb_entry(n, struct ref_entry, node); ref 647 fs/btrfs/ref-verify.c ref->root_objectid, ref->parent, ref->owner, ref 648 fs/btrfs/ref-verify.c ref->offset, ref->num_refs); ref 672 fs/btrfs/ref-verify.c struct ref_entry *ref = NULL, *exist; ref 700 fs/btrfs/ref-verify.c ref = kzalloc(sizeof(struct ref_entry), GFP_NOFS); ref 702 fs/btrfs/ref-verify.c if (!ra || !ref) { ref 703 fs/btrfs/ref-verify.c kfree(ref); ref 710 fs/btrfs/ref-verify.c ref->parent = parent; ref 712 fs/btrfs/ref-verify.c ref->root_objectid = ref_root; ref 713 fs/btrfs/ref-verify.c ref->owner = owner; ref 714 fs/btrfs/ref-verify.c ref->offset = offset; ref 716 fs/btrfs/ref-verify.c ref->num_refs = (action == BTRFS_DROP_DELAYED_REF) ? 
-1 : 1; ref 718 fs/btrfs/ref-verify.c memcpy(&ra->ref, ref, sizeof(struct ref_entry)); ref 725 fs/btrfs/ref-verify.c ra->ref.owner = owner; ref 726 fs/btrfs/ref-verify.c ra->ref.offset = offset; ref 727 fs/btrfs/ref-verify.c ra->ref.root_objectid = ref_root; ref 747 fs/btrfs/ref-verify.c kfree(ref); ref 761 fs/btrfs/ref-verify.c kfree(ref); ref 780 fs/btrfs/ref-verify.c kfree(ref); ref 803 fs/btrfs/ref-verify.c kfree(ref); ref 817 fs/btrfs/ref-verify.c exist = insert_ref_entry(&be->refs, ref); ref 825 fs/btrfs/ref-verify.c kfree(ref); ref 841 fs/btrfs/ref-verify.c kfree(ref); ref 845 fs/btrfs/ref-verify.c kfree(ref); ref 1693 fs/btrfs/relocation.c struct btrfs_ref ref = { 0 }; ref 1755 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, new_bytenr, ref 1757 fs/btrfs/relocation.c ref.real_root = root->root_key.objectid; ref 1758 fs/btrfs/relocation.c btrfs_init_data_ref(&ref, btrfs_header_owner(leaf), ref 1760 fs/btrfs/relocation.c ret = btrfs_inc_extent_ref(trans, &ref); ref 1766 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_DROP_DELAYED_REF, bytenr, ref 1768 fs/btrfs/relocation.c ref.real_root = root->root_key.objectid; ref 1769 fs/btrfs/relocation.c btrfs_init_data_ref(&ref, btrfs_header_owner(leaf), ref 1771 fs/btrfs/relocation.c ret = btrfs_free_extent(trans, &ref); ref 1813 fs/btrfs/relocation.c struct btrfs_ref ref = { 0 }; ref 1974 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, old_bytenr, ref 1976 fs/btrfs/relocation.c ref.skip_qgroup = true; ref 1977 fs/btrfs/relocation.c btrfs_init_tree_ref(&ref, level - 1, src->root_key.objectid); ref 1978 fs/btrfs/relocation.c ret = btrfs_inc_extent_ref(trans, &ref); ref 1980 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, new_bytenr, ref 1982 fs/btrfs/relocation.c ref.skip_qgroup = true; ref 1983 fs/btrfs/relocation.c btrfs_init_tree_ref(&ref, level - 1, dest->root_key.objectid); ref 1984 fs/btrfs/relocation.c ret = btrfs_inc_extent_ref(trans, &ref); ref 1987 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_DROP_DELAYED_REF, new_bytenr, ref 1989 fs/btrfs/relocation.c btrfs_init_tree_ref(&ref, level - 1, src->root_key.objectid); ref 1990 fs/btrfs/relocation.c ref.skip_qgroup = true; ref 1991 fs/btrfs/relocation.c ret = btrfs_free_extent(trans, &ref); ref 1994 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_DROP_DELAYED_REF, old_bytenr, ref 1996 fs/btrfs/relocation.c btrfs_init_tree_ref(&ref, level - 1, dest->root_key.objectid); ref 1997 fs/btrfs/relocation.c ref.skip_qgroup = true; ref 1998 fs/btrfs/relocation.c ret = btrfs_free_extent(trans, &ref); ref 2817 fs/btrfs/relocation.c struct btrfs_ref ref = { 0 }; ref 2923 fs/btrfs/relocation.c btrfs_init_generic_ref(&ref, BTRFS_ADD_DELAYED_REF, ref 2926 fs/btrfs/relocation.c ref.real_root = root->root_key.objectid; ref 2927 fs/btrfs/relocation.c btrfs_init_tree_ref(&ref, node->level, ref 2929 fs/btrfs/relocation.c ret = btrfs_inc_extent_ref(trans, &ref); ref 3651 fs/btrfs/relocation.c struct btrfs_extent_data_ref *ref, ref 3671 fs/btrfs/relocation.c ref_root = btrfs_extent_data_ref_root(leaf, ref); ref 3672 fs/btrfs/relocation.c ref_objectid = btrfs_extent_data_ref_objectid(leaf, ref); ref 3673 fs/btrfs/relocation.c ref_offset = btrfs_extent_data_ref_offset(leaf, ref); ref 3674 fs/btrfs/relocation.c ref_count = btrfs_extent_data_ref_count(leaf, ref); ref 358 fs/btrfs/root-tree.c struct btrfs_root_ref *ref; ref 377 fs/btrfs/root-tree.c ref = btrfs_item_ptr(leaf, path->slots[0], ref 379 
fs/btrfs/root-tree.c ptr = (unsigned long)(ref + 1); ref 380 fs/btrfs/root-tree.c if ((btrfs_root_ref_dirid(leaf, ref) != dirid) || ref 381 fs/btrfs/root-tree.c (btrfs_root_ref_name_len(leaf, ref) != name_len) || ref 386 fs/btrfs/root-tree.c *sequence = btrfs_root_ref_sequence(leaf, ref); ref 432 fs/btrfs/root-tree.c struct btrfs_root_ref *ref; ref 445 fs/btrfs/root-tree.c sizeof(*ref) + name_len); ref 453 fs/btrfs/root-tree.c ref = btrfs_item_ptr(leaf, path->slots[0], struct btrfs_root_ref); ref 454 fs/btrfs/root-tree.c btrfs_set_root_ref_dirid(leaf, ref, dirid); ref 455 fs/btrfs/root-tree.c btrfs_set_root_ref_sequence(leaf, ref, sequence); ref 456 fs/btrfs/root-tree.c btrfs_set_root_ref_name_len(leaf, ref, name_len); ref 457 fs/btrfs/root-tree.c ptr = (unsigned long)(ref + 1); ref 2350 fs/btrfs/send.c struct btrfs_root_ref *ref; ref 2385 fs/btrfs/send.c ref = btrfs_item_ptr(leaf, path->slots[0], struct btrfs_root_ref); ref 2386 fs/btrfs/send.c namelen = btrfs_root_ref_name_len(leaf, ref); ref 2387 fs/btrfs/send.c read_extent_buffer(leaf, name, (unsigned long)(ref + 1), namelen); ref 2770 fs/btrfs/send.c static void set_ref_path(struct recorded_ref *ref, struct fs_path *path) ref 2772 fs/btrfs/send.c ref->full_path = path; ref 2773 fs/btrfs/send.c ref->name = (char *)kbasename(ref->full_path->start); ref 2774 fs/btrfs/send.c ref->name_len = ref->full_path->end - ref->name; ref 2785 fs/btrfs/send.c struct recorded_ref *ref; ref 2787 fs/btrfs/send.c ref = kmalloc(sizeof(*ref), GFP_KERNEL); ref 2788 fs/btrfs/send.c if (!ref) ref 2791 fs/btrfs/send.c ref->dir = dir; ref 2792 fs/btrfs/send.c ref->dir_gen = dir_gen; ref 2793 fs/btrfs/send.c set_ref_path(ref, path); ref 2794 fs/btrfs/send.c list_add_tail(&ref->list, head); ref 2798 fs/btrfs/send.c static int dup_ref(struct recorded_ref *ref, struct list_head *list) ref 2802 fs/btrfs/send.c new = kmalloc(sizeof(*ref), GFP_KERNEL); ref 2806 fs/btrfs/send.c new->dir = ref->dir; ref 2807 fs/btrfs/send.c new->dir_gen = ref->dir_gen; ref 3775 fs/btrfs/send.c static int update_ref_path(struct send_ctx *sctx, struct recorded_ref *ref) ref 3788 fs/btrfs/send.c ret = get_cur_path(sctx, ref->dir, ref->dir_gen, new_path); ref 3793 fs/btrfs/send.c ret = fs_path_add(new_path, ref->name, ref->name_len); ref 3799 fs/btrfs/send.c fs_path_free(ref->full_path); ref 3800 fs/btrfs/send.c set_ref_path(ref, new_path); ref 6098 fs/btrfs/send.c struct recorded_ref *ref; ref 6100 fs/btrfs/send.c ref = list_first_entry(&deleted_refs, struct recorded_ref, list); ref 6101 fs/btrfs/send.c ret = send_unlink(sctx, ref->full_path); ref 6104 fs/btrfs/send.c fs_path_free(ref->full_path); ref 6105 fs/btrfs/send.c list_del(&ref->list); ref 6106 fs/btrfs/send.c kfree(ref); ref 707 fs/btrfs/tree-log.c struct btrfs_ref ref = { 0 }; ref 719 fs/btrfs/tree-log.c btrfs_init_generic_ref(&ref, ref 722 fs/btrfs/tree-log.c btrfs_init_data_ref(&ref, ref 725 fs/btrfs/tree-log.c ret = btrfs_inc_extent_ref(trans, &ref); ref 949 fs/btrfs/tree-log.c struct btrfs_inode_ref *ref; ref 981 fs/btrfs/tree-log.c ref = (struct btrfs_inode_ref *)ptr; ref 982 fs/btrfs/tree-log.c found_name_len = btrfs_inode_ref_name_len(path->nodes[0], ref); ref 984 fs/btrfs/tree-log.c name_ptr = (unsigned long)(ref + 1); ref 992 fs/btrfs/tree-log.c ptr = (unsigned long)(ref + 1) + found_name_len; ref 1204 fs/btrfs/tree-log.c struct btrfs_inode_ref *ref; ref 1206 fs/btrfs/tree-log.c ref = (struct btrfs_inode_ref *)ref_ptr; ref 1208 fs/btrfs/tree-log.c *namelen = btrfs_inode_ref_name_len(eb, ref); ref 1213 fs/btrfs/tree-log.c 
read_extent_buffer(eb, *name, (unsigned long)(ref + 1), *namelen); ref 1216 fs/btrfs/tree-log.c *index = btrfs_inode_ref_index(eb, ref); ref 1676 fs/btrfs/tree-log.c struct btrfs_inode_ref *ref; ref 1678 fs/btrfs/tree-log.c ref = (struct btrfs_inode_ref *)ptr; ref 1680 fs/btrfs/tree-log.c ref); ref 1681 fs/btrfs/tree-log.c ptr = (unsigned long)(ref + 1) + name_len; ref 325 fs/ceph/mds_client.h atomic_t ref; ref 1027 fs/ceph/snap.c if (atomic_inc_return(&exist->ref) == 1) ref 1050 fs/ceph/snap.c atomic_set(&sm->ref, 1); ref 1069 fs/ceph/snap.c if (atomic_inc_return(&exist->ref) == 1) ref 1092 fs/ceph/snap.c if (atomic_dec_and_lock(&sm->ref, &mdsc->snapid_map_lock)) { ref 1154 fs/ceph/snap.c if (WARN_ON_ONCE(atomic_read(&sm->ref))) { ref 137 fs/cifs/cifs_dfs_ref.c const struct dfs_info3_param *ref, ref 153 fs/cifs/cifs_dfs_ref.c if (ref) { ref 154 fs/cifs/cifs_dfs_ref.c if (strlen(fullpath) - ref->path_consumed) { ref 155 fs/cifs/cifs_dfs_ref.c prepath = fullpath + ref->path_consumed; ref 161 fs/cifs/cifs_dfs_ref.c name = cifs_build_devname(ref->node_name, prepath); ref 81 fs/cifs/cifsproto.h const char *fullpath, const struct dfs_info3_param *ref, ref 4499 fs/cifs/connect.c struct dfs_info3_param ref = {0}; ref 4505 fs/cifs/connect.c rc = dfs_cache_get_tgt_referral(path, tgt_it, &ref); ref 4509 fs/cifs/connect.c mdata = cifs_compose_mount_options(cifs_sb->mountdata, path, &ref, ref 4511 fs/cifs/connect.c free_dfs_info_param(&ref); ref 258 fs/cifs/dfs_cache.c const struct dfs_info3_param *ref = &refs[i]; ref 269 fs/cifs/dfs_cache.c ref->flags, ref->path_consumed, ref->server_type, ref 270 fs/cifs/dfs_cache.c ref->ref_flag, ref->path_name, ref->node_name, ref 271 fs/cifs/dfs_cache.c ref->ttl, ref->ttl / 60); ref 727 fs/cifs/dfs_cache.c struct dfs_info3_param *ref, const char *tgt) ref 733 fs/cifs/dfs_cache.c memset(ref, 0, sizeof(*ref)); ref 735 fs/cifs/dfs_cache.c ref->path_name = kstrndup(path, strlen(path), GFP_KERNEL); ref 736 fs/cifs/dfs_cache.c if (!ref->path_name) ref 739 fs/cifs/dfs_cache.c ref->path_consumed = ce->ce_path_consumed; ref 741 fs/cifs/dfs_cache.c ref->node_name = kstrndup(tgt, strlen(tgt), GFP_KERNEL); ref 742 fs/cifs/dfs_cache.c if (!ref->node_name) { ref 747 fs/cifs/dfs_cache.c ref->ttl = ce->ce_ttl; ref 748 fs/cifs/dfs_cache.c ref->server_type = ce->ce_srvtype; ref 749 fs/cifs/dfs_cache.c ref->ref_flag = ce->ce_flags; ref 754 fs/cifs/dfs_cache.c kfree(ref->path_name); ref 755 fs/cifs/dfs_cache.c ref->path_name = NULL; ref 827 fs/cifs/dfs_cache.c const char *path, struct dfs_info3_param *ref, ref 844 fs/cifs/dfs_cache.c if (ref) ref 845 fs/cifs/dfs_cache.c rc = setup_ref(path, ce, ref, get_tgt_name(ce)); ref 874 fs/cifs/dfs_cache.c int dfs_cache_noreq_find(const char *path, struct dfs_info3_param *ref, ref 895 fs/cifs/dfs_cache.c if (ref) ref 896 fs/cifs/dfs_cache.c rc = setup_ref(path, ce, ref, get_tgt_name(ce)); ref 1046 fs/cifs/dfs_cache.c struct dfs_info3_param *ref) ref 1053 fs/cifs/dfs_cache.c if (!it || !ref) ref 1074 fs/cifs/dfs_cache.c rc = setup_ref(path, ce, ref, it->it_name); ref 1318 fs/cifs/dfs_cache.c struct dfs_info3_param ref = {0}; ref 1330 fs/cifs/dfs_cache.c rc = dfs_cache_noreq_find(rpath, &ref, NULL); ref 1336 fs/cifs/dfs_cache.c mdata = cifs_compose_mount_options(vi->vi_mntdata, rpath, &ref, ref 1338 fs/cifs/dfs_cache.c free_dfs_info_param(&ref); ref 31 fs/cifs/dfs_cache.h const char *path, struct dfs_info3_param *ref, ref 33 fs/cifs/dfs_cache.h extern int dfs_cache_noreq_find(const char *path, struct dfs_info3_param *ref, ref 45 
fs/cifs/dfs_cache.h struct dfs_info3_param *ref); ref 696 fs/cifs/misc.c struct dfs_referral_level_3 *ref; ref 707 fs/cifs/misc.c ref = (struct dfs_referral_level_3 *) &(rsp->referrals); ref 708 fs/cifs/misc.c if (ref->VersionNumber != cpu_to_le16(3)) { ref 710 fs/cifs/misc.c le16_to_cpu(ref->VersionNumber)); ref 751 fs/cifs/misc.c node->server_type = le16_to_cpu(ref->ServerType); ref 752 fs/cifs/misc.c node->ref_flag = le16_to_cpu(ref->ReferralEntryFlags); ref 755 fs/cifs/misc.c temp = (char *)ref + le16_to_cpu(ref->DfsPathOffset); ref 765 fs/cifs/misc.c temp = (char *)ref + le16_to_cpu(ref->NetworkAddressOffset); ref 774 fs/cifs/misc.c node->ttl = le32_to_cpu(ref->TimeToLive); ref 776 fs/cifs/misc.c ref++; ref 597 fs/cifs/smb2ops.c smb2_close_cached_fid(struct kref *ref) ref 599 fs/cifs/smb2ops.c struct cached_fid *cfid = container_of(ref, struct cached_fid, ref 1244 fs/ext4/xattr.c u32 hash, ref; ref 1254 fs/ext4/xattr.c ref = le32_to_cpu(BHDR(bh)->h_refcount); ref 1255 fs/ext4/xattr.c if (ref == 1) { ref 1278 fs/ext4/xattr.c ref--; ref 1279 fs/ext4/xattr.c BHDR(bh)->h_refcount = cpu_to_le32(ref); ref 1280 fs/ext4/xattr.c if (ref == EXT4_XATTR_REFCOUNT_MAX - 1) { ref 1988 fs/ext4/xattr.c u32 ref; ref 2032 fs/ext4/xattr.c ref = le32_to_cpu(BHDR(new_bh)->h_refcount) + 1; ref 2033 fs/ext4/xattr.c BHDR(new_bh)->h_refcount = cpu_to_le32(ref); ref 2034 fs/ext4/xattr.c if (ref >= EXT4_XATTR_REFCOUNT_MAX) ref 2037 fs/ext4/xattr.c ref); ref 300 fs/f2fs/f2fs.h unsigned short ref; /* reference count */ ref 952 fs/f2fs/segment.c dc->ref = 0; ref 1013 fs/f2fs/segment.c f2fs_bug_on(sbi, dc->ref); ref 1565 fs/f2fs/segment.c dc->ref--; ref 1566 fs/f2fs/segment.c if (!dc->ref) { ref 1596 fs/f2fs/segment.c if (dc->state == D_DONE && !dc->ref) { ref 1602 fs/f2fs/segment.c dc->ref++; ref 1649 fs/f2fs/segment.c dc->ref++; ref 86 fs/fuse/virtio_fs.c static void release_virtio_fs_obj(struct kref *ref) ref 88 fs/fuse/virtio_fs.c struct virtio_fs *vfs = container_of(ref, struct virtio_fs, refcount); ref 389 fs/io_uring.c static void io_ring_ctx_ref_free(struct percpu_ref *ref) ref 391 fs/io_uring.c struct io_ring_ctx *ctx = container_of(ref, struct io_ring_ctx, refs); ref 30 fs/iomap/direct-io.c atomic_t ref; ref 64 fs/iomap/direct-io.c atomic_inc(&dio->ref); ref 154 fs/iomap/direct-io.c if (atomic_dec_and_test(&dio->ref)) { ref 419 fs/iomap/direct-io.c atomic_set(&dio->ref, 1); ref 543 fs/iomap/direct-io.c if (!atomic_dec_and_test(&dio->ref)) { ref 404 fs/jffs2/debug.c struct jffs2_raw_node_ref *ref; ref 414 fs/jffs2/debug.c for (ref = jeb->first_node; ; ref = ref_next(ref)) { ref 415 fs/jffs2/debug.c printk("%#08x", ref_offset(ref)); ref 417 fs/jffs2/debug.c printk("(%x)", ref->__totlen); ref 419 fs/jffs2/debug.c if (ref_next(ref)) ref 209 fs/jffs2/erase.c struct jffs2_raw_node_ref *ref, struct jffs2_eraseblock *jeb) ref 214 fs/jffs2/erase.c prev = &ref->next_in_ino; ref 235 fs/jffs2/erase.c if (this == ref) ref 291 fs/jffs2/erase.c struct jffs2_raw_node_ref *block, *ref; ref 295 fs/jffs2/erase.c block = ref = jeb->first_node; ref 297 fs/jffs2/erase.c while (ref) { ref 298 fs/jffs2/erase.c if (ref->flash_offset == REF_LINK_NODE) { ref 299 fs/jffs2/erase.c ref = ref->next_in_ino; ref 301 fs/jffs2/erase.c block = ref; ref 304 fs/jffs2/erase.c if (ref->flash_offset != REF_EMPTY_NODE && ref->next_in_ino) ref 305 fs/jffs2/erase.c jffs2_remove_node_refs_from_ino_list(c, ref, jeb); ref 308 fs/jffs2/erase.c ref++; ref 204 fs/jffs2/malloc.c struct jffs2_raw_node_ref **p, *ref; ref 210 fs/jffs2/malloc.c ref = *p; ref 
215 fs/jffs2/malloc.c if (ref && ref->flash_offset != REF_EMPTY_NODE) ref 216 fs/jffs2/malloc.c ref++; ref 219 fs/jffs2/malloc.c if (!ref) { ref 221 fs/jffs2/malloc.c ref = *p = jffs2_alloc_refblock(); ref 222 fs/jffs2/malloc.c if (!ref) ref 225 fs/jffs2/malloc.c if (ref->flash_offset == REF_LINK_NODE) { ref 226 fs/jffs2/malloc.c p = &ref->next_in_ino; ref 227 fs/jffs2/malloc.c ref = *p; ref 231 fs/jffs2/malloc.c ref++; ref 299 fs/jffs2/malloc.c struct jffs2_xattr_ref *ref; ref 300 fs/jffs2/malloc.c ref = kmem_cache_zalloc(xattr_ref_cache, GFP_KERNEL); ref 301 fs/jffs2/malloc.c dbg_memalloc("%p\n", ref); ref 302 fs/jffs2/malloc.c if (!ref) ref 305 fs/jffs2/malloc.c ref->class = RAWNODE_CLASS_XATTR_REF; ref 306 fs/jffs2/malloc.c ref->node = (void *)ref; ref 307 fs/jffs2/malloc.c return ref; ref 310 fs/jffs2/malloc.c void jffs2_free_xattr_ref(struct jffs2_xattr_ref *ref) ref 312 fs/jffs2/malloc.c dbg_memalloc("%p\n", ref); ref 313 fs/jffs2/malloc.c kmem_cache_free(xattr_ref_cache, ref); ref 590 fs/jffs2/nodelist.c struct jffs2_raw_node_ref *ref; ref 595 fs/jffs2/nodelist.c ref = jeb->last_node; ref 597 fs/jffs2/nodelist.c dbg_noderef("Last node at %p is (%08x,%p)\n", ref, ref->flash_offset, ref 598 fs/jffs2/nodelist.c ref->next_in_ino); ref 600 fs/jffs2/nodelist.c while (ref->flash_offset != REF_EMPTY_NODE) { ref 601 fs/jffs2/nodelist.c if (ref->flash_offset == REF_LINK_NODE) ref 602 fs/jffs2/nodelist.c ref = ref->next_in_ino; ref 604 fs/jffs2/nodelist.c ref++; ref 607 fs/jffs2/nodelist.c dbg_noderef("New ref is %p (%08x becomes %08x,%p) len 0x%x\n", ref, ref 608 fs/jffs2/nodelist.c ref->flash_offset, ofs, ref->next_in_ino, len); ref 610 fs/jffs2/nodelist.c ref->flash_offset = ofs; ref 613 fs/jffs2/nodelist.c jeb->first_node = ref; ref 614 fs/jffs2/nodelist.c BUG_ON(ref_offset(ref) != jeb->offset); ref 615 fs/jffs2/nodelist.c } else if (unlikely(ref_offset(ref) != jeb->offset + c->sector_size - jeb->free_size)) { ref 619 fs/jffs2/nodelist.c ref, ref_offset(ref), ref_offset(ref)+len, ref 624 fs/jffs2/nodelist.c jeb->last_node = ref; ref 627 fs/jffs2/nodelist.c ref->next_in_ino = ic->nodes; ref 628 fs/jffs2/nodelist.c ic->nodes = ref; ref 630 fs/jffs2/nodelist.c ref->next_in_ino = NULL; ref 633 fs/jffs2/nodelist.c switch(ref_flags(ref)) { ref 655 fs/jffs2/nodelist.c ref->__totlen = len; ref 656 fs/jffs2/nodelist.c ref_totlen(c, jeb, ref); ref 658 fs/jffs2/nodelist.c return ref; ref 694 fs/jffs2/nodelist.c struct jffs2_raw_node_ref *ref) ref 697 fs/jffs2/nodelist.c struct jffs2_raw_node_ref *next_ref = ref_next(ref); ref 703 fs/jffs2/nodelist.c jeb = &c->blocks[ref->flash_offset / c->sector_size]; ref 706 fs/jffs2/nodelist.c if (unlikely(ref != jeb->last_node)) { ref 708 fs/jffs2/nodelist.c ref, ref_offset(ref), jeb->last_node, ref 715 fs/jffs2/nodelist.c return ref_end - ref_offset(ref); ref 719 fs/jffs2/nodelist.c struct jffs2_raw_node_ref *ref) ref 723 fs/jffs2/nodelist.c ret = __ref_totlen(c, jeb, ref); ref 726 fs/jffs2/nodelist.c if (unlikely(ret != ref->__totlen)) { ref 728 fs/jffs2/nodelist.c jeb = &c->blocks[ref->flash_offset / c->sector_size]; ref 731 fs/jffs2/nodelist.c ref, ref_offset(ref), ref_offset(ref) + ref->__totlen, ref 732 fs/jffs2/nodelist.c ret, ref->__totlen); ref 733 fs/jffs2/nodelist.c if (ref_next(ref)) { ref 735 fs/jffs2/nodelist.c ref_next(ref), ref_offset(ref_next(ref)), ref 736 fs/jffs2/nodelist.c ref_offset(ref_next(ref)) + ref->__totlen); ref 751 fs/jffs2/nodelist.c ret = ref->__totlen; ref 101 fs/jffs2/nodelist.h static inline struct jffs2_raw_node_ref 
*ref_next(struct jffs2_raw_node_ref *ref) ref 103 fs/jffs2/nodelist.h ref++; ref 106 fs/jffs2/nodelist.h if (ref->flash_offset == REF_LINK_NODE) { ref 107 fs/jffs2/nodelist.h ref = ref->next_in_ino; ref 108 fs/jffs2/nodelist.h if (!ref) ref 109 fs/jffs2/nodelist.h return ref; ref 113 fs/jffs2/nodelist.h if (ref->flash_offset == REF_EMPTY_NODE) ref 116 fs/jffs2/nodelist.h return ref; ref 136 fs/jffs2/nodelist.h #define ref_flags(ref) ((ref)->flash_offset & 3) ref 137 fs/jffs2/nodelist.h #define ref_offset(ref) ((ref)->flash_offset & ~3) ref 138 fs/jffs2/nodelist.h #define ref_obsolete(ref) (((ref)->flash_offset & 3) == REF_OBSOLETE) ref 139 fs/jffs2/nodelist.h #define mark_ref_normal(ref) do { (ref)->flash_offset = ref_offset(ref) | REF_NORMAL; } while(0) ref 381 fs/jffs2/nodelist.h struct jffs2_raw_node_ref *ref); ref 579 fs/jffs2/nodemgmt.c void jffs2_mark_node_obsolete(struct jffs2_sb_info *c, struct jffs2_raw_node_ref *ref) ref 588 fs/jffs2/nodemgmt.c if(unlikely(!ref)) { ref 592 fs/jffs2/nodemgmt.c if (ref_obsolete(ref)) { ref 594 fs/jffs2/nodemgmt.c __func__, ref_offset(ref)); ref 597 fs/jffs2/nodemgmt.c blocknr = ref->flash_offset / c->sector_size; ref 600 fs/jffs2/nodemgmt.c ref->flash_offset); ref 618 fs/jffs2/nodemgmt.c freed_len = ref_totlen(c, jeb, ref); ref 620 fs/jffs2/nodemgmt.c if (ref_flags(ref) == REF_UNCHECKED) { ref 624 fs/jffs2/nodemgmt.c ref->flash_offset, jeb->used_size); ref 628 fs/jffs2/nodemgmt.c ref_offset(ref), freed_len); ref 635 fs/jffs2/nodemgmt.c ref->flash_offset, jeb->used_size); ref 639 fs/jffs2/nodemgmt.c ref_offset(ref), freed_len); ref 673 fs/jffs2/nodemgmt.c ref->flash_offset = ref_offset(ref) | REF_OBSOLETE; ref 757 fs/jffs2/nodemgmt.c ref_offset(ref)); ref 758 fs/jffs2/nodemgmt.c ret = jffs2_flash_read(c, ref_offset(ref), sizeof(n), &retlen, (char *)&n); ref 761 fs/jffs2/nodemgmt.c ref_offset(ref), ret); ref 766 fs/jffs2/nodemgmt.c ref_offset(ref), retlen); ref 776 fs/jffs2/nodemgmt.c ref_offset(ref), je16_to_cpu(n.nodetype)); ref 781 fs/jffs2/nodemgmt.c ret = jffs2_flash_write(c, ref_offset(ref), sizeof(n), &retlen, (char *)&n); ref 784 fs/jffs2/nodemgmt.c ref_offset(ref), ret); ref 789 fs/jffs2/nodemgmt.c ref_offset(ref), retlen); ref 803 fs/jffs2/nodemgmt.c if (ref->next_in_ino) { ref 809 fs/jffs2/nodemgmt.c ic = jffs2_raw_ref_to_ic(ref); ref 810 fs/jffs2/nodemgmt.c for (p = &ic->nodes; (*p) != ref; p = &((*p)->next_in_ino)) ref 813 fs/jffs2/nodemgmt.c *p = ref->next_in_ino; ref 814 fs/jffs2/nodemgmt.c ref->next_in_ino = NULL; ref 33 fs/jffs2/readinode.c struct jffs2_raw_node_ref *ref = tn->fn->raw; ref 43 fs/jffs2/readinode.c ofs = ref_offset(ref) + sizeof(struct jffs2_raw_inode); ref 53 fs/jffs2/readinode.c ref_offset(ref), tn->csize, ofs); ref 62 fs/jffs2/readinode.c ref_offset(ref), tn->csize, tn->partial_crc, tn->data_crc, ofs - len, ofs, len); ref 109 fs/jffs2/readinode.c ref_offset(ref), tn->data_crc, crc); ref 114 fs/jffs2/readinode.c jeb = &c->blocks[ref->flash_offset / c->sector_size]; ref 115 fs/jffs2/readinode.c len = ref_totlen(c, jeb, ref); ref 120 fs/jffs2/readinode.c ref->flash_offset |= REF_PRISTINE; ref 568 fs/jffs2/readinode.c static struct jffs2_raw_node_ref *jffs2_first_valid_node(struct jffs2_raw_node_ref *ref) ref 570 fs/jffs2/readinode.c while (ref && ref->next_in_ino) { ref 571 fs/jffs2/readinode.c if (!ref_obsolete(ref)) ref 572 fs/jffs2/readinode.c return ref; ref 573 fs/jffs2/readinode.c dbg_noderef("node at 0x%08x is obsoleted. 
Ignoring.\n", ref_offset(ref)); ref 574 fs/jffs2/readinode.c ref = ref->next_in_ino; ref 586 fs/jffs2/readinode.c static inline int read_direntry(struct jffs2_sb_info *c, struct jffs2_raw_node_ref *ref, ref 594 fs/jffs2/readinode.c BUG_ON(ref_obsolete(ref)); ref 599 fs/jffs2/readinode.c ref_offset(ref), je32_to_cpu(rd->node_crc), crc); ref 600 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 605 fs/jffs2/readinode.c if (ref_flags(ref) == REF_UNCHECKED) { ref 612 fs/jffs2/readinode.c ref_offset(ref), rd->nsize, je32_to_cpu(rd->totlen)); ref 613 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 617 fs/jffs2/readinode.c jeb = &c->blocks[ref->flash_offset / c->sector_size]; ref 618 fs/jffs2/readinode.c len = ref_totlen(c, jeb, ref); ref 625 fs/jffs2/readinode.c ref->flash_offset = ref_offset(ref) | dirent_node_state(rd); ref 633 fs/jffs2/readinode.c fd->raw = ref; ref 661 fs/jffs2/readinode.c err = jffs2_flash_read(c, (ref_offset(ref)) + read, ref 697 fs/jffs2/readinode.c static inline int read_dnode(struct jffs2_sb_info *c, struct jffs2_raw_node_ref *ref, ref 707 fs/jffs2/readinode.c BUG_ON(ref_obsolete(ref)); ref 712 fs/jffs2/readinode.c ref_offset(ref), je32_to_cpu(rd->node_crc), crc); ref 713 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 727 fs/jffs2/readinode.c if (ref_flags(ref) == REF_UNCHECKED) { ref 732 fs/jffs2/readinode.c JFFS2_WARNING("inode node header CRC is corrupted at %#08x\n", ref_offset(ref)); ref 733 fs/jffs2/readinode.c jffs2_dbg_dump_node(c, ref_offset(ref)); ref 734 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 788 fs/jffs2/readinode.c ref_offset(ref), tn->partial_crc, je32_to_cpu(rd->data_crc)); ref 789 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 803 fs/jffs2/readinode.c jeb = &c->blocks[ref->flash_offset / c->sector_size]; ref 804 fs/jffs2/readinode.c len = ref_totlen(c, jeb, ref); ref 811 fs/jffs2/readinode.c ref->flash_offset = ref_offset(ref) | REF_NORMAL; ref 827 fs/jffs2/readinode.c tn->fn->raw = ref; ref 841 fs/jffs2/readinode.c ref_offset(ref), je32_to_cpu(rd->version), ref 872 fs/jffs2/readinode.c static inline int read_unknown(struct jffs2_sb_info *c, struct jffs2_raw_node_ref *ref, struct jffs2_unknown_node *un) ref 875 fs/jffs2/readinode.c if (ref_flags(ref) == REF_UNCHECKED) { ref 877 fs/jffs2/readinode.c ref_offset(ref)); ref 881 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 891 fs/jffs2/readinode.c je16_to_cpu(un->nodetype), ref_offset(ref)); ref 898 fs/jffs2/readinode.c je16_to_cpu(un->nodetype), ref_offset(ref)); ref 904 fs/jffs2/readinode.c je16_to_cpu(un->nodetype), ref_offset(ref)); ref 909 fs/jffs2/readinode.c je16_to_cpu(un->nodetype), ref_offset(ref)); ref 910 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 924 fs/jffs2/readinode.c static int read_more(struct jffs2_sb_info *c, struct jffs2_raw_node_ref *ref, ref 939 fs/jffs2/readinode.c offs = ref_offset(ref) + *rdlen; ref 968 fs/jffs2/readinode.c struct jffs2_raw_node_ref *ref, *valid_ref; ref 996 fs/jffs2/readinode.c ref = valid_ref; ref 997 fs/jffs2/readinode.c valid_ref = jffs2_first_valid_node(ref->next_in_ino); ref 1019 fs/jffs2/readinode.c end = ref_offset(ref) + len; ref 1023 fs/jffs2/readinode.c len = end - ref_offset(ref); ref 1026 fs/jffs2/readinode.c dbg_readinode("read %d bytes at %#08x(%d).\n", len, ref_offset(ref), ref_flags(ref)); ref 1029 fs/jffs2/readinode.c err = jffs2_flash_read(c, ref_offset(ref), len, &retlen, buf); ref 1031 fs/jffs2/readinode.c JFFS2_ERROR("can not read %d bytes from 0x%08x, 
error code: %d.\n", len, ref_offset(ref), err); ref 1036 fs/jffs2/readinode.c JFFS2_ERROR("short read at %#08x: %zu instead of %d.\n", ref_offset(ref), retlen, len); ref 1046 fs/jffs2/readinode.c ref_offset(ref), je16_to_cpu(node->u.magic), ref 1050 fs/jffs2/readinode.c jffs2_dbg_dump_node(c, ref_offset(ref)); ref 1051 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 1057 fs/jffs2/readinode.c je16_to_cpu(node->u.magic), ref_offset(ref)); ref 1058 fs/jffs2/readinode.c jffs2_mark_node_obsolete(c, ref); ref 1068 fs/jffs2/readinode.c err = read_more(c, ref, sizeof(struct jffs2_raw_dirent), &len, buf); ref 1073 fs/jffs2/readinode.c err = read_direntry(c, ref, &node->d, retlen, rii); ref 1083 fs/jffs2/readinode.c err = read_more(c, ref, sizeof(struct jffs2_raw_inode), &len, buf); ref 1088 fs/jffs2/readinode.c err = read_dnode(c, ref, &node->i, len, rii); ref 1097 fs/jffs2/readinode.c err = read_more(c, ref, sizeof(struct jffs2_unknown_node), &len, buf); ref 1102 fs/jffs2/readinode.c err = read_unknown(c, ref, &node->u); ref 387 fs/jffs2/scan.c struct jffs2_xattr_ref *ref; ref 409 fs/jffs2/scan.c ref = jffs2_alloc_xattr_ref(); ref 410 fs/jffs2/scan.c if (!ref) ref 422 fs/jffs2/scan.c ref->ino = je32_to_cpu(rr->ino); ref 423 fs/jffs2/scan.c ref->xid = je32_to_cpu(rr->xid); ref 424 fs/jffs2/scan.c ref->xseqno = je32_to_cpu(rr->xseqno); ref 425 fs/jffs2/scan.c if (ref->xseqno > c->highest_xseqno) ref 426 fs/jffs2/scan.c c->highest_xseqno = (ref->xseqno & ~XREF_DELETE_MARKER); ref 427 fs/jffs2/scan.c ref->next = c->xref_temp; ref 428 fs/jffs2/scan.c c->xref_temp = ref; ref 430 fs/jffs2/scan.c jffs2_link_node_ref(c, jeb, ofs | REF_PRISTINE, PAD(je32_to_cpu(rr->totlen)), (void *)ref); ref 435 fs/jffs2/scan.c ofs, ref->xid, ref->ino); ref 523 fs/jffs2/summary.c struct jffs2_xattr_ref *ref; ref 532 fs/jffs2/summary.c ref = jffs2_alloc_xattr_ref(); ref 533 fs/jffs2/summary.c if (!ref) { ref 537 fs/jffs2/summary.c ref->next = c->xref_temp; ref 538 fs/jffs2/summary.c c->xref_temp = ref; ref 541 fs/jffs2/summary.c PAD(sizeof(struct jffs2_raw_xref)), (void *)ref); ref 543 fs/jffs2/summary.c *pseudo_random += ref->node->flash_offset; ref 444 fs/jffs2/xattr.c static int verify_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref) ref 454 fs/jffs2/xattr.c if (ref_flags(ref->node) != REF_UNCHECKED) ref 456 fs/jffs2/xattr.c offset = ref_offset(ref->node); ref 482 fs/jffs2/xattr.c ref->ino = je32_to_cpu(rr.ino); ref 483 fs/jffs2/xattr.c ref->xid = je32_to_cpu(rr.xid); ref 484 fs/jffs2/xattr.c ref->xseqno = je32_to_cpu(rr.xseqno); ref 485 fs/jffs2/xattr.c if (ref->xseqno > c->highest_xseqno) ref 486 fs/jffs2/xattr.c c->highest_xseqno = (ref->xseqno & ~XREF_DELETE_MARKER); ref 490 fs/jffs2/xattr.c for (raw=ref->node; raw != (void *)ref; raw=raw->next_in_ino) { ref 497 fs/jffs2/xattr.c raw->flash_offset = ref_offset(raw) | ((ref->node==raw) ? 
REF_PRISTINE : REF_NORMAL); ref 502 fs/jffs2/xattr.c ref->ino, ref->xid, ref_offset(ref->node)); ref 506 fs/jffs2/xattr.c static int save_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref) ref 520 fs/jffs2/xattr.c if (is_xattr_ref_dead(ref)) { ref 522 fs/jffs2/xattr.c rr.ino = cpu_to_je32(ref->ino); ref 523 fs/jffs2/xattr.c rr.xid = cpu_to_je32(ref->xid); ref 525 fs/jffs2/xattr.c rr.ino = cpu_to_je32(ref->ic->ino); ref 526 fs/jffs2/xattr.c rr.xid = cpu_to_je32(ref->xd->xid); ref 542 fs/jffs2/xattr.c ref->xseqno = xseqno; ref 543 fs/jffs2/xattr.c jffs2_add_physical_node_ref(c, phys_ofs | REF_PRISTINE, PAD(sizeof(rr)), (void *)ref); ref 545 fs/jffs2/xattr.c dbg_xattr("success on saving xref (ino=%u, xid=%u)\n", ref->ic->ino, ref->xd->xid); ref 554 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref; ref 557 fs/jffs2/xattr.c ref = jffs2_alloc_xattr_ref(); ref 558 fs/jffs2/xattr.c if (!ref) ref 560 fs/jffs2/xattr.c ref->ic = ic; ref 561 fs/jffs2/xattr.c ref->xd = xd; ref 563 fs/jffs2/xattr.c ret = save_xattr_ref(c, ref); ref 565 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 570 fs/jffs2/xattr.c ref->next = ic->xref; ref 571 fs/jffs2/xattr.c ic->xref = ref; ref 573 fs/jffs2/xattr.c return ref; /* success */ ref 576 fs/jffs2/xattr.c static void delete_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref) ref 581 fs/jffs2/xattr.c xd = ref->xd; ref 582 fs/jffs2/xattr.c ref->xseqno |= XREF_DELETE_MARKER; ref 583 fs/jffs2/xattr.c ref->ino = ref->ic->ino; ref 584 fs/jffs2/xattr.c ref->xid = ref->xd->xid; ref 586 fs/jffs2/xattr.c ref->next = c->xref_dead_list; ref 587 fs/jffs2/xattr.c c->xref_dead_list = ref; ref 591 fs/jffs2/xattr.c ref->ino, ref->xid, ref->xseqno); ref 600 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, *_ref; ref 606 fs/jffs2/xattr.c for (ref = ic->xref; ref; ref = _ref) { ref 607 fs/jffs2/xattr.c _ref = ref->next; ref 608 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 618 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, *_ref; ref 621 fs/jffs2/xattr.c for (ref = ic->xref; ref; ref = _ref) { ref 622 fs/jffs2/xattr.c _ref = ref->next; ref 623 fs/jffs2/xattr.c xd = ref->xd; ref 628 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 640 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, *cmp, **pref, **pcmp; ref 648 fs/jffs2/xattr.c for (ref=ic->xref, pref=&ic->xref; ref; pref=&ref->next, ref=ref->next) { ref 649 fs/jffs2/xattr.c if (!ref->xd->xname) { ref 650 fs/jffs2/xattr.c rc = load_xattr_datum(c, ref->xd); ref 652 fs/jffs2/xattr.c *pref = ref->next; ref 653 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 658 fs/jffs2/xattr.c for (cmp=ref->next, pcmp=&ref->next; cmp; pcmp=&cmp->next, cmp=cmp->next) { ref 660 fs/jffs2/xattr.c ref->xd->flags |= JFFS2_XFLAGS_BIND; ref 662 fs/jffs2/xattr.c ref->xd->flags &= ~JFFS2_XFLAGS_BIND; ref 670 fs/jffs2/xattr.c if (ref->xd->xprefix == cmp->xd->xprefix ref 671 fs/jffs2/xattr.c && !strcmp(ref->xd->xname, cmp->xd->xname)) { ref 672 fs/jffs2/xattr.c if (ref->xseqno > cmp->xseqno) { ref 676 fs/jffs2/xattr.c *pref = ref->next; ref 677 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 743 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, *_ref; ref 746 fs/jffs2/xattr.c for (ref=c->xref_temp; ref; ref = _ref) { ref 747 fs/jffs2/xattr.c _ref = ref->next; ref 748 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 751 fs/jffs2/xattr.c for (ref=c->xref_dead_list; ref; ref = _ref) { ref 752 fs/jffs2/xattr.c _ref = ref->next; ref 753 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 777 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, *_ref; ref 790 fs/jffs2/xattr.c 
for (ref=c->xref_temp; ref; ref=_ref) { ref 793 fs/jffs2/xattr.c _ref = ref->next; ref 794 fs/jffs2/xattr.c if (ref_flags(ref->node) != REF_PRISTINE) { ref 795 fs/jffs2/xattr.c if (verify_xattr_ref(c, ref)) { ref 796 fs/jffs2/xattr.c BUG_ON(ref->node->next_in_ino != (void *)ref); ref 797 fs/jffs2/xattr.c ref->node->next_in_ino = NULL; ref 798 fs/jffs2/xattr.c jffs2_mark_node_obsolete(c, ref->node); ref 799 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 804 fs/jffs2/xattr.c i = (ref->ino ^ ref->xid) % XREF_TMPHASH_SIZE; ref 806 fs/jffs2/xattr.c if (tmp->ino == ref->ino && tmp->xid == ref->xid) ref 810 fs/jffs2/xattr.c raw = ref->node; ref 811 fs/jffs2/xattr.c if (ref->xseqno > tmp->xseqno) { ref 812 fs/jffs2/xattr.c tmp->xseqno = ref->xseqno; ref 819 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 822 fs/jffs2/xattr.c ref->next = xref_tmphash[i]; ref 823 fs/jffs2/xattr.c xref_tmphash[i] = ref; ref 830 fs/jffs2/xattr.c for (ref=xref_tmphash[i]; ref; ref=_ref) { ref 832 fs/jffs2/xattr.c _ref = ref->next; ref 833 fs/jffs2/xattr.c if (is_xattr_ref_dead(ref)) { ref 834 fs/jffs2/xattr.c ref->next = c->xref_dead_list; ref 835 fs/jffs2/xattr.c c->xref_dead_list = ref; ref 841 fs/jffs2/xattr.c xd = jffs2_find_xattr_datum(c, ref->xid); ref 842 fs/jffs2/xattr.c ic = jffs2_get_ino_cache(c, ref->ino); ref 845 fs/jffs2/xattr.c ref->ino, ref->xid, ref->xseqno); ref 846 fs/jffs2/xattr.c ref->xseqno |= XREF_DELETE_MARKER; ref 847 fs/jffs2/xattr.c ref->next = c->xref_dead_list; ref 848 fs/jffs2/xattr.c c->xref_dead_list = ref; ref 852 fs/jffs2/xattr.c ref->xd = xd; ref 853 fs/jffs2/xattr.c ref->ic = ic; ref 855 fs/jffs2/xattr.c ref->next = ic->xref; ref 856 fs/jffs2/xattr.c ic->xref = ref; ref 967 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, **pref; ref 981 fs/jffs2/xattr.c for (ref=ic->xref, pref=&ic->xref; ref; pref=&ref->next, ref=ref->next) { ref 982 fs/jffs2/xattr.c BUG_ON(ref->ic != ic); ref 983 fs/jffs2/xattr.c xd = ref->xd; ref 994 fs/jffs2/xattr.c *pref = ref->next; ref 995 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 1038 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, **pref; ref 1047 fs/jffs2/xattr.c for (ref=ic->xref, pref=&ic->xref; ref; pref=&ref->next, ref=ref->next) { ref 1048 fs/jffs2/xattr.c BUG_ON(ref->ic!=ic); ref 1050 fs/jffs2/xattr.c xd = ref->xd; ref 1063 fs/jffs2/xattr.c *pref = ref->next; ref 1064 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 1100 fs/jffs2/xattr.c struct jffs2_xattr_ref *ref, *newref, **pref; ref 1119 fs/jffs2/xattr.c for (ref=ic->xref, pref=&ic->xref; ref; pref=&ref->next, ref=ref->next) { ref 1120 fs/jffs2/xattr.c xd = ref->xd; ref 1126 fs/jffs2/xattr.c *pref = ref->next; ref 1127 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 1138 fs/jffs2/xattr.c ref->ino = ic->ino; ref 1139 fs/jffs2/xattr.c ref->xid = xd->xid; ref 1140 fs/jffs2/xattr.c ref->xseqno |= XREF_DELETE_MARKER; ref 1141 fs/jffs2/xattr.c rc = save_xattr_ref(c, ref); ref 1143 fs/jffs2/xattr.c *pref = ref->next; ref 1145 fs/jffs2/xattr.c ref->next = c->xref_dead_list; ref 1146 fs/jffs2/xattr.c c->xref_dead_list = ref; ref 1150 fs/jffs2/xattr.c ref->ic = ic; ref 1151 fs/jffs2/xattr.c ref->xd = xd; ref 1152 fs/jffs2/xattr.c ref->xseqno &= ~XREF_DELETE_MARKER; ref 1188 fs/jffs2/xattr.c if (ref) ref 1189 fs/jffs2/xattr.c *pref = ref->next; ref 1192 fs/jffs2/xattr.c if (ref) { ref 1193 fs/jffs2/xattr.c ref->next = ic->xref; ref 1194 fs/jffs2/xattr.c ic->xref = ref; ref 1198 fs/jffs2/xattr.c } else if (ref) { ref 1199 fs/jffs2/xattr.c delete_xattr_ref(c, ref); ref 1255 fs/jffs2/xattr.c int 
jffs2_garbage_collect_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref, ref 1262 fs/jffs2/xattr.c BUG_ON(!ref->node); ref 1264 fs/jffs2/xattr.c if (ref->node != raw) ref 1266 fs/jffs2/xattr.c if (is_xattr_ref_dead(ref) && (raw->next_in_ino == (void *)ref)) ref 1269 fs/jffs2/xattr.c old_ofs = ref_offset(ref->node); ref 1270 fs/jffs2/xattr.c totlen = ref_totlen(c, c->gcblock, ref->node); ref 1278 fs/jffs2/xattr.c rc = save_xattr_ref(c, ref); ref 1281 fs/jffs2/xattr.c ref->ic->ino, ref->xd->xid, old_ofs, ref_offset(ref->node)); ref 1332 fs/jffs2/xattr.c void jffs2_release_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref) ref 1337 fs/jffs2/xattr.c if (ref->node != (void *)ref) ref 1341 fs/jffs2/xattr.c if (ref == tmp) { ref 1346 fs/jffs2/xattr.c jffs2_free_xattr_ref(ref); ref 66 fs/jffs2/xattr.h static inline int is_xattr_ref_dead(struct jffs2_xattr_ref *ref) ref 68 fs/jffs2/xattr.h return ((ref->xseqno & XREF_DELETE_MARKER) != 0); ref 86 fs/jffs2/xattr.h extern int jffs2_garbage_collect_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref, ref 90 fs/jffs2/xattr.h extern void jffs2_release_xattr_ref(struct jffs2_sb_info *c, struct jffs2_xattr_ref *ref); ref 471 fs/nfs/callback_proc.c struct referring_call *ref; ref 488 fs/nfs/callback_proc.c ref = &rclist->rcl_refcalls[j]; ref 490 fs/nfs/callback_proc.c status = nfs4_slot_wait_on_seqid(tbl, ref->rc_slotid, ref 491 fs/nfs/callback_proc.c ref->rc_sequenceid, HZ >> 1) < 0; ref 115 fs/nfs/dns_resolve.c static void nfs_dns_ent_put(struct kref *ref) ref 119 fs/nfs/dns_resolve.c item = container_of(ref, struct nfs_dns_ent, h.ref); ref 192 fs/nfs/flexfilelayout/flexfilelayout.c if (refcount_inc_not_zero(&pos->ref)) { ref 223 fs/nfs/flexfilelayout/flexfilelayout.c refcount_set(&mirror->ref, 1); ref 245 fs/nfs/flexfilelayout/flexfilelayout.c if (mirror != NULL && refcount_dec_and_test(&mirror->ref)) ref 2432 fs/nfs/flexfilelayout/flexfilelayout.c if (!refcount_inc_not_zero(&mirror->ref)) ref 86 fs/nfs/flexfilelayout/flexfilelayout.h refcount_t ref; ref 346 fs/nfs/pnfs.h atomic_t ref; ref 411 fs/nfs/pnfs.h atomic_inc(&d->ref); ref 86 fs/nfs/pnfs_dev.c if (atomic_read(&d->ref)) ref 179 fs/nfs/pnfs_dev.c if (d != NULL && !atomic_inc_not_zero(&d->ref)) ref 209 fs/nfs/pnfs_dev.c atomic_inc(&new->ref); ref 257 fs/nfs/pnfs_dev.c atomic_set(&d->ref, 1); ref 275 fs/nfs/pnfs_dev.c if (atomic_add_unless(&d->ref, -1, 2)) ref 279 fs/nfs/pnfs_dev.c if (!atomic_dec_and_test(&d->ref)) ref 332 fs/nfs/pnfs_dev.c if (d->nfs_client == clp && atomic_read(&d->ref)) { ref 42 fs/nfsd/export.c static void expkey_put(struct kref *ref) ref 44 fs/nfsd/export.c struct svc_expkey *key = container_of(ref, struct svc_expkey, h.ref); ref 210 fs/nfsd/export.c kref_get(&item->ek_client->ref); ref 323 fs/nfsd/export.c static void svc_export_put(struct kref *ref) ref 325 fs/nfsd/export.c struct svc_export *exp = container_of(ref, struct svc_export, h.ref); ref 717 fs/nfsd/export.c kref_get(&item->ex_client->ref); ref 90 fs/nfsd/nfs4idmap.c ent_put(struct kref *ref) ref 92 fs/nfsd/nfs4idmap.c struct ent *map = container_of(ref, struct ent, h.ref); ref 5411 fs/nfsd/nfs4state.c static __be32 check_stateid_generation(stateid_t *in, stateid_t *ref, bool has_session) ref 5420 fs/nfsd/nfs4state.c if (in->si_generation == ref->si_generation) ref 5424 fs/nfsd/nfs4state.c if (nfsd4_stateid_generation_after(in, ref)) ref 6042 fs/ocfs2/xattr.c struct ocfs2_xattr_tree_value_refcount_para *ref = ref 6081 fs/ocfs2/xattr.c &et, ref->ref_ci, ref 6082 fs/ocfs2/xattr.c 
ref->ref_root_bh, ref 6083 fs/ocfs2/xattr.c ref->dealloc, p); ref 1044 fs/orangefs/inode.c static inline ino_t orangefs_handle_hash(struct orangefs_object_kref *ref) ref 1046 fs/orangefs/inode.c if (!ref) ref 1048 fs/orangefs/inode.c return orangefs_khandle_to_ino(&(ref->khandle)); ref 1056 fs/orangefs/inode.c struct orangefs_object_kref *ref = (struct orangefs_object_kref *) data; ref 1057 fs/orangefs/inode.c ORANGEFS_I(inode)->refn.fs_id = ref->fs_id; ref 1058 fs/orangefs/inode.c ORANGEFS_I(inode)->refn.khandle = ref->khandle; ref 1071 fs/orangefs/inode.c struct orangefs_object_kref *ref = (struct orangefs_object_kref *) data; ref 1077 fs/orangefs/inode.c &(ref->khandle)) && ref 1078 fs/orangefs/inode.c orangefs_inode->refn.fs_id == ref->fs_id); ref 1089 fs/orangefs/inode.c struct orangefs_object_kref *ref) ref 1095 fs/orangefs/inode.c hash = orangefs_handle_hash(ref); ref 1100 fs/orangefs/inode.c ref); ref 1120 fs/orangefs/inode.c &ref->khandle, ref 1121 fs/orangefs/inode.c ref->fs_id, ref 1132 fs/orangefs/inode.c int mode, dev_t dev, struct orangefs_object_kref *ref) ref 1134 fs/orangefs/inode.c unsigned long hash = orangefs_handle_hash(ref); ref 1150 fs/orangefs/inode.c orangefs_set_inode(inode, ref); ref 1160 fs/orangefs/inode.c error = insert_inode_locked4(inode, hash, orangefs_test_inode, ref); ref 25 fs/orangefs/namei.c struct orangefs_object_kref ref; ref 60 fs/orangefs/namei.c ref = new_op->downcall.resp.create.refn; ref 62 fs/orangefs/namei.c inode = orangefs_new_inode(dir->i_sb, dir, S_IFREG | mode, 0, &ref); ref 224 fs/orangefs/namei.c struct orangefs_object_kref ref; ref 267 fs/orangefs/namei.c ref = new_op->downcall.resp.sym.refn; ref 269 fs/orangefs/namei.c inode = orangefs_new_inode(dir->i_sb, dir, S_IFLNK | mode, 0, &ref); ref 310 fs/orangefs/namei.c struct orangefs_object_kref ref; ref 341 fs/orangefs/namei.c ref = new_op->downcall.resp.mkdir.refn; ref 343 fs/orangefs/namei.c inode = orangefs_new_inode(dir->i_sb, dir, S_IFDIR | mode, 0, &ref); ref 363 fs/orangefs/orangefs-kernel.h struct orangefs_object_kref *ref); ref 384 fs/orangefs/orangefs-kernel.h struct orangefs_object_kref *ref); ref 303 fs/quota/quota_tree.c __le32 *ref; ref 323 fs/quota/quota_tree.c ref = (__le32 *)buf; ref 324 fs/quota/quota_tree.c newblk = le32_to_cpu(ref[get_index(info, dquot->dq_id, depth)]); ref 332 fs/quota/quota_tree.c le32_to_cpu(ref[get_index(info, ref 343 fs/quota/quota_tree.c ref[get_index(info, dquot->dq_id, depth)] = ref 480 fs/quota/quota_tree.c __le32 *ref = (__le32 *)buf; ref 490 fs/quota/quota_tree.c newblk = le32_to_cpu(ref[get_index(info, dquot->dq_id, depth)]); ref 499 fs/quota/quota_tree.c ref[get_index(info, dquot->dq_id, depth)] = cpu_to_le32(0); ref 501 fs/quota/quota_tree.c for (i = 0; i < (info->dqi_usable_bs >> 2) && !ref[i]; i++) ref 576 fs/quota/quota_tree.c __le32 *ref = (__le32 *)buf; ref 587 fs/quota/quota_tree.c blk = le32_to_cpu(ref[get_index(info, dquot->dq_id, depth)]); ref 683 fs/quota/quota_tree.c __le32 *ref = (__le32 *)buf; ref 702 fs/quota/quota_tree.c if (ref[i] == cpu_to_le32(0)) { ref 710 fs/quota/quota_tree.c ret = find_next_id(info, id, le32_to_cpu(ref[i]), depth + 1); ref 401 fs/ubifs/debug.c const struct ubifs_ref_node *ref = node; ref 403 fs/ubifs/debug.c pr_err("\tlnum %u\n", le32_to_cpu(ref->lnum)); ref 404 fs/ubifs/debug.c pr_err("\toffs %u\n", le32_to_cpu(ref->offs)); ref 405 fs/ubifs/debug.c pr_err("\tjhead %u\n", le32_to_cpu(ref->jhead)); ref 168 fs/ubifs/log.c struct ubifs_ref_node *ref; ref 173 fs/ubifs/log.c ref = 
kzalloc(c->ref_node_alsz, GFP_NOFS); ref 174 fs/ubifs/log.c if (!ref) { ref 229 fs/ubifs/log.c ref->ch.node_type = UBIFS_REF_NODE; ref 230 fs/ubifs/log.c ref->lnum = cpu_to_le32(bud->lnum); ref 231 fs/ubifs/log.c ref->offs = cpu_to_le32(bud->start); ref 232 fs/ubifs/log.c ref->jhead = cpu_to_le32(jhead); ref 262 fs/ubifs/log.c err = ubifs_write_node(c, ref, UBIFS_REF_NODE_SZ, c->lhead_lnum, ref 267 fs/ubifs/log.c err = ubifs_shash_update(c, c->log_hash, ref, UBIFS_REF_NODE_SZ); ref 280 fs/ubifs/log.c kfree(ref); ref 285 fs/ubifs/log.c kfree(ref); ref 360 fs/ubifs/log.c struct ubifs_ref_node *ref; ref 402 fs/ubifs/log.c ref = buf + len; ref 403 fs/ubifs/log.c ref->ch.node_type = UBIFS_REF_NODE; ref 404 fs/ubifs/log.c ref->lnum = cpu_to_le32(lnum); ref 405 fs/ubifs/log.c ref->offs = cpu_to_le32(offs); ref 406 fs/ubifs/log.c ref->jhead = cpu_to_le32(i); ref 408 fs/ubifs/log.c ubifs_prepare_node(c, ref, UBIFS_REF_NODE_SZ, 0); ref 411 fs/ubifs/log.c err = ubifs_shash_update(c, c->log_hash, ref, ref 666 fs/ubifs/log.c struct ubifs_ref_node *ref = snod->node; ref 667 fs/ubifs/log.c int ref_lnum = le32_to_cpu(ref->lnum); ref 949 fs/ubifs/replay.c static int validate_ref(struct ubifs_info *c, const struct ubifs_ref_node *ref) ref 952 fs/ubifs/replay.c int lnum = le32_to_cpu(ref->lnum); ref 953 fs/ubifs/replay.c unsigned int offs = le32_to_cpu(ref->offs); ref 954 fs/ubifs/replay.c unsigned int jhead = le32_to_cpu(ref->jhead); ref 1088 fs/ubifs/replay.c const struct ubifs_ref_node *ref = snod->node; ref 1090 fs/ubifs/replay.c err = validate_ref(c, ref); ref 1096 fs/ubifs/replay.c err = ubifs_shash_update(c, c->log_hash, ref, ref 1101 fs/ubifs/replay.c err = add_replay_bud(c, le32_to_cpu(ref->lnum), ref 1102 fs/ubifs/replay.c le32_to_cpu(ref->offs), ref 1103 fs/ubifs/replay.c le32_to_cpu(ref->jhead), ref 43 fs/xfs/libxfs/xfs_sb.c int ref = 0; ref 49 fs/xfs/libxfs/xfs_sb.c ref = atomic_inc_return(&pag->pag_ref); ref 52 fs/xfs/libxfs/xfs_sb.c trace_xfs_perag_get(mp, agno, ref, _RET_IP_); ref 67 fs/xfs/libxfs/xfs_sb.c int ref; ref 76 fs/xfs/libxfs/xfs_sb.c ref = atomic_inc_return(&pag->pag_ref); ref 78 fs/xfs/libxfs/xfs_sb.c trace_xfs_perag_get_tag(mp, pag->pag_agno, ref, _RET_IP_); ref 86 fs/xfs/libxfs/xfs_sb.c int ref; ref 89 fs/xfs/libxfs/xfs_sb.c ref = atomic_dec_return(&pag->pag_ref); ref 90 fs/xfs/libxfs/xfs_sb.c trace_xfs_perag_put(pag->pag_mount, pag->pag_agno, ref, _RET_IP_); ref 276 include/asm-generic/vmlinux.lds.h *(.ref.data) \ ref 482 include/asm-generic/vmlinux.lds.h *(.ref.rodata) \ ref 523 include/asm-generic/vmlinux.lds.h *(.ref.text) \ ref 85 include/drm/drm_atomic.h struct kref ref; ref 317 include/drm/drm_atomic.h struct kref ref; ref 384 include/drm/drm_atomic.h kref_get(&commit->ref); ref 397 include/drm/drm_atomic.h kref_put(&commit->ref, __drm_crtc_commit_free); ref 413 include/drm/drm_atomic.h kref_get(&state->ref); ref 417 include/drm/drm_atomic.h void __drm_atomic_state_free(struct kref *ref); ref 428 include/drm/drm_atomic.h kref_put(&state->ref, __drm_atomic_state_free); ref 65 include/drm/drm_device.h struct kref ref; ref 232 include/linux/blkdev.h refcount_t ref; ref 20 include/linux/ceph/string_table.h extern void ceph_release_string(struct kref *ref); ref 19 include/linux/dim.h #define IS_SIGNIFICANT_DIFF(val, ref) \ ref 20 include/linux/dim.h (((100UL * abs((val) - (ref))) / (ref)) > 10) ref 132 include/linux/genhd.h struct percpu_ref ref; ref 681 include/linux/genhd.h if (percpu_ref_init(&part->ref, __delete_partition, 0, ref 689 include/linux/genhd.h 
percpu_ref_get(&part->ref); ref 694 include/linux/genhd.h return percpu_ref_tryget_live(&part->ref); ref 699 include/linux/genhd.h percpu_ref_put(&part->ref); ref 704 include/linux/genhd.h percpu_ref_kill(&part->ref); ref 711 include/linux/genhd.h percpu_ref_exit(&part->ref); ref 184 include/linux/host1x.h struct kref ref; ref 51 include/linux/hugetlb.h void resv_map_release(struct kref *ref); ref 51 include/linux/hw_random.h struct kref ref; ref 133 include/linux/iio/buffer_impl.h struct kref ref; ref 73 include/linux/init.h #define __ref __section(.ref.text) noinline ref 74 include/linux/init.h #define __refdata __section(.ref.data) ref 75 include/linux/init.h #define __refconst __section(.ref.rodata) ref 260 include/linux/interrupt.h void (*release)(struct kref *ref); ref 429 include/linux/lightnvm.h struct kref ref; ref 267 include/linux/lockd/lockd.h typedef int (*nlm_host_match_fn_t)(void *cur, struct nlm_host *ref); ref 72 include/linux/memcontrol.h refcount_t ref; ref 110 include/linux/memremap.h struct percpu_ref *ref; ref 173 include/linux/memremap.h percpu_ref_put(pgmap->ref); ref 29 include/linux/mtd/blktrans.h struct kref ref; ref 244 include/linux/netfilter/ipset/ip_set.h u32 ref; ref 109 include/linux/percpu-refcount.h int __must_check percpu_ref_init(struct percpu_ref *ref, ref 112 include/linux/percpu-refcount.h void percpu_ref_exit(struct percpu_ref *ref); ref 113 include/linux/percpu-refcount.h void percpu_ref_switch_to_atomic(struct percpu_ref *ref, ref 115 include/linux/percpu-refcount.h void percpu_ref_switch_to_atomic_sync(struct percpu_ref *ref); ref 116 include/linux/percpu-refcount.h void percpu_ref_switch_to_percpu(struct percpu_ref *ref); ref 117 include/linux/percpu-refcount.h void percpu_ref_kill_and_confirm(struct percpu_ref *ref, ref 119 include/linux/percpu-refcount.h void percpu_ref_resurrect(struct percpu_ref *ref); ref 120 include/linux/percpu-refcount.h void percpu_ref_reinit(struct percpu_ref *ref); ref 134 include/linux/percpu-refcount.h static inline void percpu_ref_kill(struct percpu_ref *ref) ref 136 include/linux/percpu-refcount.h percpu_ref_kill_and_confirm(ref, NULL); ref 145 include/linux/percpu-refcount.h static inline bool __ref_is_percpu(struct percpu_ref *ref, ref 161 include/linux/percpu-refcount.h percpu_ptr = READ_ONCE(ref->percpu_count_ptr); ref 185 include/linux/percpu-refcount.h static inline void percpu_ref_get_many(struct percpu_ref *ref, unsigned long nr) ref 191 include/linux/percpu-refcount.h if (__ref_is_percpu(ref, &percpu_count)) ref 194 include/linux/percpu-refcount.h atomic_long_add(nr, &ref->count); ref 207 include/linux/percpu-refcount.h static inline void percpu_ref_get(struct percpu_ref *ref) ref 209 include/linux/percpu-refcount.h percpu_ref_get_many(ref, 1); ref 221 include/linux/percpu-refcount.h static inline bool percpu_ref_tryget(struct percpu_ref *ref) ref 228 include/linux/percpu-refcount.h if (__ref_is_percpu(ref, &percpu_count)) { ref 232 include/linux/percpu-refcount.h ret = atomic_long_inc_not_zero(&ref->count); ref 255 include/linux/percpu-refcount.h static inline bool percpu_ref_tryget_live(struct percpu_ref *ref) ref 262 include/linux/percpu-refcount.h if (__ref_is_percpu(ref, &percpu_count)) { ref 265 include/linux/percpu-refcount.h } else if (!(ref->percpu_count_ptr & __PERCPU_REF_DEAD)) { ref 266 include/linux/percpu-refcount.h ret = atomic_long_inc_not_zero(&ref->count); ref 284 include/linux/percpu-refcount.h static inline void percpu_ref_put_many(struct percpu_ref *ref, unsigned long nr) ref 290 
include/linux/percpu-refcount.h if (__ref_is_percpu(ref, &percpu_count)) ref 292 include/linux/percpu-refcount.h else if (unlikely(atomic_long_sub_and_test(nr, &ref->count))) ref 293 include/linux/percpu-refcount.h ref->release(ref); ref 307 include/linux/percpu-refcount.h static inline void percpu_ref_put(struct percpu_ref *ref) ref 309 include/linux/percpu-refcount.h percpu_ref_put_many(ref, 1); ref 321 include/linux/percpu-refcount.h static inline bool percpu_ref_is_dying(struct percpu_ref *ref) ref 323 include/linux/percpu-refcount.h return ref->percpu_count_ptr & __PERCPU_REF_DEAD; ref 334 include/linux/percpu-refcount.h static inline bool percpu_ref_is_zero(struct percpu_ref *ref) ref 338 include/linux/percpu-refcount.h if (__ref_is_percpu(ref, &percpu_count)) ref 340 include/linux/percpu-refcount.h return !atomic_long_read(&ref->count); ref 66 include/linux/sched/topology.h atomic_t ref; ref 19 include/linux/sh_clk.h struct kref ref; ref 54 include/linux/sunrpc/cache.h struct kref ref; ref 184 include/linux/sunrpc/cache.h kref_get(&h->ref); ref 190 include/linux/sunrpc/cache.h if (kref_get_unless_zero(&h->ref)) ref 197 include/linux/sunrpc/cache.h if (kref_read(&h->ref) <= 2 && ref 200 include/linux/sunrpc/cache.h kref_put(&h->ref, cd->cache_put); ref 81 include/linux/sunrpc/svcauth.h struct kref ref; ref 324 include/linux/usb.h struct kref ref; /* reference counter */ ref 331 include/linux/usb.h container_of(r, struct usb_interface_cache, ref) ref 57 include/media/v4l2-device.h struct kref ref; ref 71 include/media/v4l2-device.h kref_get(&v4l2_dev->ref); ref 187 include/net/act_api.h int ref); ref 659 include/net/bluetooth/l2cap.h struct kref ref; ref 1486 include/rdma/ib_verbs.h struct kref ref; ref 220 include/rdma/rdma_vt.h struct kref ref; ref 748 include/trace/events/btrfs.h const struct btrfs_delayed_ref_node *ref, ref 752 include/trace/events/btrfs.h TP_ARGS(fs_info, ref, full_ref, action), ref 766 include/trace/events/btrfs.h __entry->bytenr = ref->bytenr; ref 767 include/trace/events/btrfs.h __entry->num_bytes = ref->num_bytes; ref 772 include/trace/events/btrfs.h __entry->type = ref->type; ref 773 include/trace/events/btrfs.h __entry->seq = ref->seq; ref 791 include/trace/events/btrfs.h const struct btrfs_delayed_ref_node *ref, ref 795 include/trace/events/btrfs.h TP_ARGS(fs_info, ref, full_ref, action) ref 801 include/trace/events/btrfs.h const struct btrfs_delayed_ref_node *ref, ref 805 include/trace/events/btrfs.h TP_ARGS(fs_info, ref, full_ref, action) ref 811 include/trace/events/btrfs.h const struct btrfs_delayed_ref_node *ref, ref 815 include/trace/events/btrfs.h TP_ARGS(fs_info, ref, full_ref, action), ref 830 include/trace/events/btrfs.h __entry->bytenr = ref->bytenr; ref 831 include/trace/events/btrfs.h __entry->num_bytes = ref->num_bytes; ref 837 include/trace/events/btrfs.h __entry->type = ref->type; ref 838 include/trace/events/btrfs.h __entry->seq = ref->seq; ref 858 include/trace/events/btrfs.h const struct btrfs_delayed_ref_node *ref, ref 862 include/trace/events/btrfs.h TP_ARGS(fs_info, ref, full_ref, action) ref 868 include/trace/events/btrfs.h const struct btrfs_delayed_ref_node *ref, ref 872 include/trace/events/btrfs.h TP_ARGS(fs_info, ref, full_ref, action) ref 259 include/trace/events/kvm.h TP_PROTO(ulong gfn, int level, struct kvm_memory_slot *slot, int ref), ref 260 include/trace/events/kvm.h TP_ARGS(gfn, level, slot, ref), ref 274 include/trace/events/kvm.h __entry->referenced = ref; ref 49 include/uapi/linux/tipc.h __u32 ref; ref 44 
include/uapi/xen/gntdev.h __u32 ref; ref 153 include/uapi/xen/gntdev.h grant_ref_t ref; ref 93 include/xen/grant_table.h int gnttab_end_foreign_access_ref(grant_ref_t ref, int readonly); ref 101 include/xen/grant_table.h void gnttab_end_foreign_access(grant_ref_t ref, int readonly, ref 106 include/xen/grant_table.h unsigned long gnttab_end_foreign_transfer_ref(grant_ref_t ref); ref 107 include/xen/grant_table.h unsigned long gnttab_end_foreign_transfer(grant_ref_t ref); ref 109 include/xen/grant_table.h int gnttab_query_foreign_access(grant_ref_t ref); ref 116 include/xen/grant_table.h void gnttab_free_grant_reference(grant_ref_t ref); ref 131 include/xen/grant_table.h void gnttab_grant_foreign_access_ref(grant_ref_t ref, domid_t domid, ref 136 include/xen/grant_table.h grant_ref_t ref, domid_t domid, ref 139 include/xen/grant_table.h gnttab_grant_foreign_access_ref(ref, domid, xen_page_to_gfn(page), ref 148 include/xen/grant_table.h uint32_t flags, grant_ref_t ref, domid_t domid) ref 158 include/xen/grant_table.h map->ref = ref; ref 266 include/xen/interface/grant_table.h grant_ref_t ref; ref 343 include/xen/interface/grant_table.h grant_ref_t ref; ref 377 include/xen/interface/grant_table.h grant_ref_t ref; ref 489 include/xen/interface/grant_table.h grant_ref_t ref; ref 25 include/xen/interface/io/pvcalls.h grant_ref_t ref[]; ref 52 include/xen/interface/io/pvcalls.h grant_ref_t ref; ref 71 include/xen/interface/io/pvcalls.h grant_ref_t ref; ref 428 include/xen/interface/io/ring.h grant_ref_t ref[]; \ ref 44 kernel/bpf/bpf_lru_list.c return node->ref; ref 92 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 113 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 356 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 422 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 524 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 570 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 596 kernel/bpf/bpf_lru_list.c node->ref = 0; ref 27 kernel/bpf/bpf_lru_list.h u8 ref; ref 69 kernel/bpf/bpf_lru_list.h if (!node->ref) ref 70 kernel/bpf/bpf_lru_list.h node->ref = 1; ref 80 kernel/bpf/cgroup.c static void cgroup_bpf_release_fn(struct percpu_ref *ref) ref 82 kernel/bpf/cgroup.c struct cgroup *cgrp = container_of(ref, struct cgroup, bpf.refcnt); ref 555 kernel/bpf/hashtab.c offsetof(struct bpf_lru_node, ref)); ref 559 kernel/bpf/hashtab.c offsetof(struct bpf_lru_node, ref), ref 223 kernel/cgroup/cgroup.c static void css_release(struct percpu_ref *ref); ref 5075 kernel/cgroup/cgroup.c static void css_release(struct percpu_ref *ref) ref 5078 kernel/cgroup/cgroup.c container_of(ref, struct cgroup_subsys_state, refcnt); ref 5458 kernel/cgroup/cgroup.c static void css_killed_ref_fn(struct percpu_ref *ref) ref 5461 kernel/cgroup/cgroup.c container_of(ref, struct cgroup_subsys_state, refcnt); ref 292 kernel/dma/debug.c struct dma_debug_entry *ref, ref 299 kernel/dma/debug.c if (!match(ref, entry)) ref 314 kernel/dma/debug.c entry->size == ref->size ? ++match_lvl : 0; ref 315 kernel/dma/debug.c entry->type == ref->type ? ++match_lvl : 0; ref 316 kernel/dma/debug.c entry->direction == ref->direction ? ++match_lvl : 0; ref 317 kernel/dma/debug.c entry->sg_call_ents == ref->sg_call_ents ? 
++match_lvl : 0; ref 342 kernel/dma/debug.c struct dma_debug_entry *ref) ref 344 kernel/dma/debug.c return __hash_bucket_find(bucket, ref, exact_match); ref 348 kernel/dma/debug.c struct dma_debug_entry *ref, ref 352 kernel/dma/debug.c unsigned int max_range = dma_get_max_seg_size(ref->dev); ref 353 kernel/dma/debug.c struct dma_debug_entry *entry, index = *ref; ref 357 kernel/dma/debug.c entry = __hash_bucket_find(*bucket, ref, containing_match); ref 997 kernel/dma/debug.c static void check_unmap(struct dma_debug_entry *ref) ref 1003 kernel/dma/debug.c bucket = get_hash_bucket(ref, &flags); ref 1004 kernel/dma/debug.c entry = bucket_find_exact(bucket, ref); ref 1010 kernel/dma/debug.c if (dma_mapping_error(ref->dev, ref->dev_addr)) { ref 1011 kernel/dma/debug.c err_printk(ref->dev, NULL, ref 1015 kernel/dma/debug.c err_printk(ref->dev, NULL, ref 1019 kernel/dma/debug.c ref->dev_addr, ref->size); ref 1024 kernel/dma/debug.c if (ref->size != entry->size) { ref 1025 kernel/dma/debug.c err_printk(ref->dev, entry, "device driver frees " ref 1029 kernel/dma/debug.c ref->dev_addr, entry->size, ref->size); ref 1032 kernel/dma/debug.c if (ref->type != entry->type) { ref 1033 kernel/dma/debug.c err_printk(ref->dev, entry, "device driver frees " ref 1037 kernel/dma/debug.c ref->dev_addr, ref->size, ref 1038 kernel/dma/debug.c type2name[entry->type], type2name[ref->type]); ref 1040 kernel/dma/debug.c (phys_addr(ref) != phys_addr(entry))) { ref 1041 kernel/dma/debug.c err_printk(ref->dev, entry, "device driver frees " ref 1046 kernel/dma/debug.c ref->dev_addr, ref->size, ref 1048 kernel/dma/debug.c phys_addr(ref)); ref 1051 kernel/dma/debug.c if (ref->sg_call_ents && ref->type == dma_debug_sg && ref 1052 kernel/dma/debug.c ref->sg_call_ents != entry->sg_call_ents) { ref 1053 kernel/dma/debug.c err_printk(ref->dev, entry, "device driver frees " ref 1056 kernel/dma/debug.c entry->sg_call_ents, ref->sg_call_ents); ref 1063 kernel/dma/debug.c if (ref->direction != entry->direction) { ref 1064 kernel/dma/debug.c err_printk(ref->dev, entry, "device driver frees " ref 1068 kernel/dma/debug.c ref->dev_addr, ref->size, ref 1070 kernel/dma/debug.c dir2name[ref->direction]); ref 1079 kernel/dma/debug.c err_printk(ref->dev, entry, ref 1083 kernel/dma/debug.c ref->dev_addr, ref->size, ref 1139 kernel/dma/debug.c struct dma_debug_entry *ref, ref 1146 kernel/dma/debug.c bucket = get_hash_bucket(ref, &flags); ref 1148 kernel/dma/debug.c entry = bucket_find_contain(&bucket, ref, &flags); ref 1154 kernel/dma/debug.c (unsigned long long)ref->dev_addr, ref->size); ref 1158 kernel/dma/debug.c if (ref->size > entry->size) { ref 1165 kernel/dma/debug.c ref->size); ref 1171 kernel/dma/debug.c if (ref->direction != entry->direction) { ref 1176 kernel/dma/debug.c (unsigned long long)ref->dev_addr, entry->size, ref 1178 kernel/dma/debug.c dir2name[ref->direction]); ref 1182 kernel/dma/debug.c !(ref->direction == DMA_TO_DEVICE)) ref 1187 kernel/dma/debug.c (unsigned long long)ref->dev_addr, entry->size, ref 1189 kernel/dma/debug.c dir2name[ref->direction]); ref 1192 kernel/dma/debug.c !(ref->direction == DMA_FROM_DEVICE)) ref 1197 kernel/dma/debug.c (unsigned long long)ref->dev_addr, entry->size, ref 1199 kernel/dma/debug.c dir2name[ref->direction]); ref 1201 kernel/dma/debug.c if (ref->sg_call_ents && ref->type == dma_debug_sg && ref 1202 kernel/dma/debug.c ref->sg_call_ents != entry->sg_call_ents) { ref 1203 kernel/dma/debug.c err_printk(ref->dev, entry, "device driver syncs " ref 1206 kernel/dma/debug.c entry->sg_call_ents, 
ref->sg_call_ents); ref 1293 kernel/dma/debug.c struct dma_debug_entry ref; ref 1301 kernel/dma/debug.c ref.dev = dev; ref 1302 kernel/dma/debug.c ref.dev_addr = dma_addr; ref 1303 kernel/dma/debug.c bucket = get_hash_bucket(&ref, &flags); ref 1306 kernel/dma/debug.c if (!exact_match(&ref, entry)) ref 1332 kernel/dma/debug.c struct dma_debug_entry ref = { ref 1342 kernel/dma/debug.c check_unmap(&ref); ref 1385 kernel/dma/debug.c struct dma_debug_entry *ref) ref 1392 kernel/dma/debug.c bucket = get_hash_bucket(ref, &flags); ref 1393 kernel/dma/debug.c entry = bucket_find_exact(bucket, ref); ref 1414 kernel/dma/debug.c struct dma_debug_entry ref = { ref 1429 kernel/dma/debug.c mapped_ents = get_nr_mapped_entries(dev, &ref); ref 1431 kernel/dma/debug.c check_unmap(&ref); ref 1473 kernel/dma/debug.c struct dma_debug_entry ref = { ref 1487 kernel/dma/debug.c ref.pfn = vmalloc_to_pfn(virt); ref 1489 kernel/dma/debug.c ref.pfn = page_to_pfn(virt_to_page(virt)); ref 1494 kernel/dma/debug.c check_unmap(&ref); ref 1525 kernel/dma/debug.c struct dma_debug_entry ref = { ref 1536 kernel/dma/debug.c check_unmap(&ref); ref 1543 kernel/dma/debug.c struct dma_debug_entry ref; ref 1548 kernel/dma/debug.c ref.type = dma_debug_single; ref 1549 kernel/dma/debug.c ref.dev = dev; ref 1550 kernel/dma/debug.c ref.dev_addr = dma_handle; ref 1551 kernel/dma/debug.c ref.size = size; ref 1552 kernel/dma/debug.c ref.direction = direction; ref 1553 kernel/dma/debug.c ref.sg_call_ents = 0; ref 1555 kernel/dma/debug.c check_sync(dev, &ref, true); ref 1563 kernel/dma/debug.c struct dma_debug_entry ref; ref 1568 kernel/dma/debug.c ref.type = dma_debug_single; ref 1569 kernel/dma/debug.c ref.dev = dev; ref 1570 kernel/dma/debug.c ref.dev_addr = dma_handle; ref 1571 kernel/dma/debug.c ref.size = size; ref 1572 kernel/dma/debug.c ref.direction = direction; ref 1573 kernel/dma/debug.c ref.sg_call_ents = 0; ref 1575 kernel/dma/debug.c check_sync(dev, &ref, false); ref 1590 kernel/dma/debug.c struct dma_debug_entry ref = { ref 1602 kernel/dma/debug.c mapped_ents = get_nr_mapped_entries(dev, &ref); ref 1607 kernel/dma/debug.c check_sync(dev, &ref, true); ref 1623 kernel/dma/debug.c struct dma_debug_entry ref = { ref 1634 kernel/dma/debug.c mapped_ents = get_nr_mapped_entries(dev, &ref); ref 1639 kernel/dma/debug.c check_sync(dev, &ref, false); ref 57 kernel/events/uprobes.c refcount_t ref; ref 603 kernel/events/uprobes.c refcount_inc(&uprobe->ref); ref 609 kernel/events/uprobes.c if (refcount_dec_and_test(&uprobe->ref)) { ref 700 kernel/events/uprobes.c refcount_set(&uprobe->ref, 2); ref 7992 kernel/sched/fair.c group_smaller_min_cpu_capacity(struct sched_group *sg, struct sched_group *ref) ref 7994 kernel/sched/fair.c return fits_capacity(sg->sgc->min_capacity, ref->sgc->min_capacity); ref 8002 kernel/sched/fair.c group_smaller_max_cpu_capacity(struct sched_group *sg, struct sched_group *ref) ref 8004 kernel/sched/fair.c return fits_capacity(sg->sgc->max_capacity, ref->sgc->max_capacity); ref 1082 kernel/sched/psi.c static void psi_trigger_destroy(struct kref *ref) ref 1084 kernel/sched/psi.c struct psi_trigger *t = container_of(ref, struct psi_trigger, refcount); ref 1392 kernel/sched/sched.h atomic_t ref; ref 1412 kernel/sched/sched.h atomic_t ref; ref 569 kernel/sched/topology.c if (free_sgc && atomic_dec_and_test(&sg->sgc->ref)) ref 572 kernel/sched/topology.c if (atomic_dec_and_test(&sg->ref)) ref 587 kernel/sched/topology.c if (sd->shared && atomic_dec_and_test(&sd->shared->ref)) ref 894 kernel/sched/topology.c 
atomic_inc(&sg->ref); ref 910 kernel/sched/topology.c if (atomic_inc_return(&sg->sgc->ref) == 1) ref 1071 kernel/sched/topology.c already_visited = atomic_inc_return(&sg->ref) > 1; ref 1073 kernel/sched/topology.c WARN_ON(already_visited != (atomic_inc_return(&sg->sgc->ref) > 1)); ref 1268 kernel/sched/topology.c if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) ref 1271 kernel/sched/topology.c if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) ref 1274 kernel/sched/topology.c if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) ref 1423 kernel/sched/topology.c atomic_inc(&sd->shared->ref); ref 1032 kernel/trace/ftrace.c int ref; ref 4281 kernel/trace/ftrace.c WARN_ON(probe->ref <= 0); ref 4284 kernel/trace/ftrace.c probe->ref--; ref 4286 kernel/trace/ftrace.c if (!probe->ref) { ref 4306 kernel/trace/ftrace.c probe->ref++; ref 4419 kernel/trace/ftrace.c probe->ref += count; ref 4544 kernel/trace/ftrace.c WARN_ON(probe->ref < count); ref 4546 kernel/trace/ftrace.c probe->ref -= count; ref 285 kernel/trace/trace.c tr->ref++; ref 297 kernel/trace/trace.c WARN_ON(!this_tr->ref); ref 298 kernel/trace/trace.c this_tr->ref--; ref 5689 kernel/trace/trace.c if (tr->current_trace->ref) { ref 5905 kernel/trace/trace.c tr->current_trace->ref++; ref 5924 kernel/trace/trace.c tr->current_trace->ref--; ref 7233 kernel/trace/trace.c tr->current_trace->ref++; ref 7334 kernel/trace/trace.c iter->tr->current_trace->ref--; ref 7355 kernel/trace/trace.c static void buffer_ref_release(struct buffer_ref *ref) ref 7357 kernel/trace/trace.c if (!refcount_dec_and_test(&ref->refcount)) ref 7359 kernel/trace/trace.c ring_buffer_free_read_page(ref->buffer, ref->cpu, ref->page); ref 7360 kernel/trace/trace.c kfree(ref); ref 7366 kernel/trace/trace.c struct buffer_ref *ref = (struct buffer_ref *)buf->private; ref 7368 kernel/trace/trace.c buffer_ref_release(ref); ref 7375 kernel/trace/trace.c struct buffer_ref *ref = (struct buffer_ref *)buf->private; ref 7377 kernel/trace/trace.c if (refcount_read(&ref->refcount) > INT_MAX/2) ref 7380 kernel/trace/trace.c refcount_inc(&ref->refcount); ref 7398 kernel/trace/trace.c struct buffer_ref *ref = ref 7401 kernel/trace/trace.c buffer_ref_release(ref); ref 7421 kernel/trace/trace.c struct buffer_ref *ref; ref 7450 kernel/trace/trace.c ref = kzalloc(sizeof(*ref), GFP_KERNEL); ref 7451 kernel/trace/trace.c if (!ref) { ref 7456 kernel/trace/trace.c refcount_set(&ref->refcount, 1); ref 7457 kernel/trace/trace.c ref->buffer = iter->trace_buffer->buffer; ref 7458 kernel/trace/trace.c ref->page = ring_buffer_alloc_read_page(ref->buffer, iter->cpu_file); ref 7459 kernel/trace/trace.c if (IS_ERR(ref->page)) { ref 7460 kernel/trace/trace.c ret = PTR_ERR(ref->page); ref 7461 kernel/trace/trace.c ref->page = NULL; ref 7462 kernel/trace/trace.c kfree(ref); ref 7465 kernel/trace/trace.c ref->cpu = iter->cpu_file; ref 7467 kernel/trace/trace.c r = ring_buffer_read_page(ref->buffer, &ref->page, ref 7470 kernel/trace/trace.c ring_buffer_free_read_page(ref->buffer, ref->cpu, ref 7471 kernel/trace/trace.c ref->page); ref 7472 kernel/trace/trace.c kfree(ref); ref 7476 kernel/trace/trace.c page = virt_to_page(ref->page); ref 7481 kernel/trace/trace.c spd.partial[i].private = (unsigned long)ref; ref 8473 kernel/trace/trace.c if (tr->ref || (tr->current_trace && tr->current_trace->ref)) ref 311 kernel/trace/trace.h int ref; ref 501 kernel/trace/trace.h int ref; ref 1619 kernel/trace/trace.h int ref; ref 2514 kernel/trace/trace_events.c int ref; ref 2620 kernel/trace/trace_events.c edata->ref++; 
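The kernel/trace records immediately above and below show the simplest refcounting idiom in this index: a plain int ref bumped and dropped under a lock, with WARN_ON_ONCE() catching underflow (see the edata->ref++ / edata->ref-- pairs and the WARN_ON_ONCE(data->ref <= 0) checks). A minimal user-space sketch of that pattern — my_obj, my_obj_get() and my_obj_put() are illustrative names, not taken from the listing:

#include <assert.h>
#include <pthread.h>
#include <stdlib.h>

/* A bare integer reference count guarded by a lock, mirroring the
 * int ref fields in kernel/trace/trace.h and trace_events.c above. */
struct my_obj {
	pthread_mutex_t lock;
	int ref;			/* protected by lock */
};

static void my_obj_get(struct my_obj *o)
{
	pthread_mutex_lock(&o->lock);
	o->ref++;
	pthread_mutex_unlock(&o->lock);
}

static void my_obj_put(struct my_obj *o)
{
	int dead;

	pthread_mutex_lock(&o->lock);
	assert(o->ref > 0);		/* kernel code uses WARN_ON_ONCE(ref <= 0) */
	dead = (--o->ref == 0);
	pthread_mutex_unlock(&o->lock);
	if (dead) {			/* last reference dropped: release */
		pthread_mutex_destroy(&o->lock);
		free(o);
	}
}

int main(void)
{
	struct my_obj *o = calloc(1, sizeof(*o));

	if (!o)
		return 1;
	pthread_mutex_init(&o->lock, NULL);
	o->ref = 1;			/* creation holds the first reference */
	my_obj_get(o);			/* a second user */
	my_obj_put(o);
	my_obj_put(o);			/* count hits zero, frees o */
	return 0;
}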
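By contrast, most of the fs/, net/ and sound/ records in this index embed a struct kref and pair kref_get()/kref_put() with a release callback that recovers the enclosing object via container_of() — the batadv_*_release(), rsi_put() and session_free() fragments all have exactly this shape. A hedged kernel-style sketch of the pattern, assuming an illustrative struct foo that appears in none of the files listed:

#include <linux/kref.h>
#include <linux/slab.h>

struct foo {
	struct kref ref;		/* embedded reference count */
	/* ... payload ... */
};

/* Invoked by kref_put() when the count reaches zero. */
static void foo_release(struct kref *ref)
{
	struct foo *f = container_of(ref, struct foo, ref);

	kfree(f);
}

static struct foo *foo_alloc(void)
{
	struct foo *f = kzalloc(sizeof(*f), GFP_KERNEL);

	if (f)
		kref_init(&f->ref);	/* count starts at 1 */
	return f;
}

static inline void foo_get(struct foo *f)
{
	kref_get(&f->ref);
}

static inline void foo_put(struct foo *f)
{
	kref_put(&f->ref, foo_release);
}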
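The include/linux/percpu-refcount.h and lib/percpu-refcount.c records nearby trace the heavier per-CPU variant used by the genhd, memremap and cgroup entries: gets and puts touch a per-CPU counter on the fast path until percpu_ref_kill() collapses the ref into atomic mode for teardown. A minimal sketch of that lifecycle under the same API the index lists (percpu_ref_init, percpu_ref_tryget_live, percpu_ref_kill, percpu_ref_exit); my_pagemap, my_release() and the wait step are assumptions, not code from the files above:

#include <linux/percpu-refcount.h>

struct my_pagemap {
	struct percpu_ref ref;
};

/* Runs once the count hits zero after percpu_ref_kill(). */
static void my_release(struct percpu_ref *ref)
{
	struct my_pagemap *p = container_of(ref, struct my_pagemap, ref);

	/* all references gone: safe to tear the object down, or
	 * signal a completion the teardown path is waiting on */
	(void)p;
}

static int my_setup(struct my_pagemap *p)
{
	/* starts in percpu mode with a count of 1 */
	return percpu_ref_init(&p->ref, my_release, 0, GFP_KERNEL);
}

static bool my_use(struct my_pagemap *p)
{
	if (!percpu_ref_tryget_live(&p->ref))	/* fails once killed */
		return false;
	/* ... use the object ... */
	percpu_ref_put(&p->ref);
	return true;
}

static void my_teardown(struct my_pagemap *p)
{
	percpu_ref_kill(&p->ref);	/* drops the init ref; no new
					 * tryget_live() succeeds */
	/* wait for my_release() to fire (e.g. via a completion), then: */
	percpu_ref_exit(&p->ref);
}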
ref 2629 kernel/trace/trace_events.c edata->ref--; ref 2630 kernel/trace/trace_events.c if (!edata->ref) { ref 2658 kernel/trace/trace_events.c if (WARN_ON_ONCE(edata->ref <= 0)) ref 119 kernel/trace/trace_events_hist.c unsigned int ref; ref 403 kernel/trace/trace_events_hist.c int ref; ref 427 kernel/trace/trace_events_hist.c return event->ref != 0; ref 1397 kernel/trace/trace_events_hist.c if (event->ref) ref 1440 kernel/trace/trace_events_hist.c if (event->ref) ref 2457 kernel/trace/trace_events_hist.c hist_field->ref++; ref 2462 kernel/trace/trace_events_hist.c if (--hist_field->ref > 1) ref 2509 kernel/trace/trace_events_hist.c hist_field->ref = 1; ref 3534 kernel/trace/trace_events_hist.c var->ref = 1; ref 3872 kernel/trace/trace_events_hist.c data->synth_event->ref--; ref 4297 kernel/trace/trace_events_hist.c event->ref++; ref 4376 kernel/trace/trace_events_hist.c event->ref--; ref 5772 kernel/trace/trace_events_hist.c if (!data->ref && hist_data->attrs->name) ref 5775 kernel/trace/trace_events_hist.c data->ref++; ref 5800 kernel/trace/trace_events_hist.c if (WARN_ON_ONCE(data->ref <= 0)) ref 5803 kernel/trace/trace_events_hist.c data->ref--; ref 5804 kernel/trace/trace_events_hist.c if (!data->ref) { ref 5828 kernel/trace/trace_events_hist.c data->ref++; ref 5840 kernel/trace/trace_events_hist.c if (WARN_ON_ONCE(data->ref <= 0)) ref 5845 kernel/trace/trace_events_hist.c data->ref--; ref 5846 kernel/trace/trace_events_hist.c if (!data->ref) { ref 6195 kernel/trace/trace_events_hist.c se->ref--; ref 6316 kernel/trace/trace_events_hist.c se->ref--; ref 6355 kernel/trace/trace_events_hist.c se->ref++; ref 415 kernel/trace/trace_events_trigger.c data->ref++; ref 433 kernel/trace/trace_events_trigger.c if (WARN_ON_ONCE(data->ref <= 0)) ref 436 kernel/trace/trace_events_trigger.c data->ref--; ref 437 kernel/trace/trace_events_trigger.c if (!data->ref) ref 1306 kernel/trace/trace_events_trigger.c if (WARN_ON_ONCE(data->ref <= 0)) ref 1309 kernel/trace/trace_events_trigger.c data->ref--; ref 1310 kernel/trace/trace_events_trigger.c if (!data->ref) { ref 66 lib/cpu_rmap.c static void cpu_rmap_release(struct kref *ref) ref 68 lib/cpu_rmap.c struct cpu_rmap *rmap = container_of(ref, struct cpu_rmap, refcount); ref 265 lib/cpu_rmap.c static void irq_cpu_rmap_release(struct kref *ref) ref 268 lib/cpu_rmap.c container_of(ref, struct irq_glue, notify.kref); ref 359 lib/lz4/lz4hc_compress.c const BYTE *ref = NULL; ref 380 lib/lz4/lz4hc_compress.c matchlimit, (&ref), maxNbAttempts); ref 388 lib/lz4/lz4hc_compress.c ref0 = ref; ref 403 lib/lz4/lz4hc_compress.c &anchor, ml, ref, limit, oend)) ref 412 lib/lz4/lz4hc_compress.c ref = ref0; ref 422 lib/lz4/lz4hc_compress.c ref = ref2; ref 469 lib/lz4/lz4hc_compress.c ml, ref, limit, oend)) ref 499 lib/lz4/lz4hc_compress.c ml, ref, limit, oend)) ref 502 lib/lz4/lz4hc_compress.c ref = ref3; ref 540 lib/lz4/lz4hc_compress.c ref, limit, oend)) ref 544 lib/lz4/lz4hc_compress.c ref = ref2; ref 40 lib/percpu-refcount.c static unsigned long __percpu *percpu_count_ptr(struct percpu_ref *ref) ref 43 lib/percpu-refcount.c (ref->percpu_count_ptr & ~__PERCPU_REF_ATOMIC_DEAD); ref 60 lib/percpu-refcount.c int percpu_ref_init(struct percpu_ref *ref, percpu_ref_func_t *release, ref 67 lib/percpu-refcount.c ref->percpu_count_ptr = (unsigned long) ref 69 lib/percpu-refcount.c if (!ref->percpu_count_ptr) ref 72 lib/percpu-refcount.c ref->force_atomic = flags & PERCPU_REF_INIT_ATOMIC; ref 73 lib/percpu-refcount.c ref->allow_reinit = flags & PERCPU_REF_ALLOW_REINIT; ref 76 
lib/percpu-refcount.c ref->percpu_count_ptr |= __PERCPU_REF_ATOMIC; ref 77 lib/percpu-refcount.c ref->allow_reinit = true; ref 83 lib/percpu-refcount.c ref->percpu_count_ptr |= __PERCPU_REF_DEAD; ref 87 lib/percpu-refcount.c atomic_long_set(&ref->count, start_count); ref 89 lib/percpu-refcount.c ref->release = release; ref 90 lib/percpu-refcount.c ref->confirm_switch = NULL; ref 105 lib/percpu-refcount.c void percpu_ref_exit(struct percpu_ref *ref) ref 107 lib/percpu-refcount.c unsigned long __percpu *percpu_count = percpu_count_ptr(ref); ref 111 lib/percpu-refcount.c WARN_ON_ONCE(ref->confirm_switch); ref 113 lib/percpu-refcount.c ref->percpu_count_ptr = __PERCPU_REF_ATOMIC_DEAD; ref 120 lib/percpu-refcount.c struct percpu_ref *ref = container_of(rcu, struct percpu_ref, rcu); ref 122 lib/percpu-refcount.c ref->confirm_switch(ref); ref 123 lib/percpu-refcount.c ref->confirm_switch = NULL; ref 126 lib/percpu-refcount.c if (!ref->allow_reinit) ref 127 lib/percpu-refcount.c percpu_ref_exit(ref); ref 130 lib/percpu-refcount.c percpu_ref_put(ref); ref 135 lib/percpu-refcount.c struct percpu_ref *ref = container_of(rcu, struct percpu_ref, rcu); ref 136 lib/percpu-refcount.c unsigned long __percpu *percpu_count = percpu_count_ptr(ref); ref 144 lib/percpu-refcount.c atomic_long_read(&ref->count), (long)count); ref 158 lib/percpu-refcount.c atomic_long_add((long)count - PERCPU_COUNT_BIAS, &ref->count); ref 160 lib/percpu-refcount.c WARN_ONCE(atomic_long_read(&ref->count) <= 0, ref 162 lib/percpu-refcount.c ref->release, atomic_long_read(&ref->count)); ref 168 lib/percpu-refcount.c static void percpu_ref_noop_confirm_switch(struct percpu_ref *ref) ref 172 lib/percpu-refcount.c static void __percpu_ref_switch_to_atomic(struct percpu_ref *ref, ref 175 lib/percpu-refcount.c if (ref->percpu_count_ptr & __PERCPU_REF_ATOMIC) { ref 177 lib/percpu-refcount.c confirm_switch(ref); ref 182 lib/percpu-refcount.c ref->percpu_count_ptr |= __PERCPU_REF_ATOMIC; ref 188 lib/percpu-refcount.c ref->confirm_switch = confirm_switch ?: percpu_ref_noop_confirm_switch; ref 190 lib/percpu-refcount.c percpu_ref_get(ref); /* put after confirmation */ ref 191 lib/percpu-refcount.c call_rcu(&ref->rcu, percpu_ref_switch_to_atomic_rcu); ref 194 lib/percpu-refcount.c static void __percpu_ref_switch_to_percpu(struct percpu_ref *ref) ref 196 lib/percpu-refcount.c unsigned long __percpu *percpu_count = percpu_count_ptr(ref); ref 201 lib/percpu-refcount.c if (!(ref->percpu_count_ptr & __PERCPU_REF_ATOMIC)) ref 204 lib/percpu-refcount.c if (WARN_ON_ONCE(!ref->allow_reinit)) ref 207 lib/percpu-refcount.c atomic_long_add(PERCPU_COUNT_BIAS, &ref->count); ref 218 lib/percpu-refcount.c smp_store_release(&ref->percpu_count_ptr, ref 219 lib/percpu-refcount.c ref->percpu_count_ptr & ~__PERCPU_REF_ATOMIC); ref 222 lib/percpu-refcount.c static void __percpu_ref_switch_mode(struct percpu_ref *ref, ref 232 lib/percpu-refcount.c wait_event_lock_irq(percpu_ref_switch_waitq, !ref->confirm_switch, ref 235 lib/percpu-refcount.c if (ref->force_atomic || (ref->percpu_count_ptr & __PERCPU_REF_DEAD)) ref 236 lib/percpu-refcount.c __percpu_ref_switch_to_atomic(ref, confirm_switch); ref 238 lib/percpu-refcount.c __percpu_ref_switch_to_percpu(ref); ref 261 lib/percpu-refcount.c void percpu_ref_switch_to_atomic(struct percpu_ref *ref, ref 268 lib/percpu-refcount.c ref->force_atomic = true; ref 269 lib/percpu-refcount.c __percpu_ref_switch_mode(ref, confirm_switch); ref 283 lib/percpu-refcount.c void percpu_ref_switch_to_atomic_sync(struct percpu_ref *ref) ref 
285 lib/percpu-refcount.c percpu_ref_switch_to_atomic(ref, NULL); ref 286 lib/percpu-refcount.c wait_event(percpu_ref_switch_waitq, !ref->confirm_switch); ref 308 lib/percpu-refcount.c void percpu_ref_switch_to_percpu(struct percpu_ref *ref) ref 314 lib/percpu-refcount.c ref->force_atomic = false; ref 315 lib/percpu-refcount.c __percpu_ref_switch_mode(ref, NULL); ref 338 lib/percpu-refcount.c void percpu_ref_kill_and_confirm(struct percpu_ref *ref, ref 345 lib/percpu-refcount.c WARN_ONCE(ref->percpu_count_ptr & __PERCPU_REF_DEAD, ref 346 lib/percpu-refcount.c "%s called more than once on %ps!", __func__, ref->release); ref 348 lib/percpu-refcount.c ref->percpu_count_ptr |= __PERCPU_REF_DEAD; ref 349 lib/percpu-refcount.c __percpu_ref_switch_mode(ref, confirm_kill); ref 350 lib/percpu-refcount.c percpu_ref_put(ref); ref 367 lib/percpu-refcount.c void percpu_ref_reinit(struct percpu_ref *ref) ref 369 lib/percpu-refcount.c WARN_ON_ONCE(!percpu_ref_is_zero(ref)); ref 371 lib/percpu-refcount.c percpu_ref_resurrect(ref); ref 389 lib/percpu-refcount.c void percpu_ref_resurrect(struct percpu_ref *ref) ref 396 lib/percpu-refcount.c WARN_ON_ONCE(!(ref->percpu_count_ptr & __PERCPU_REF_DEAD)); ref 397 lib/percpu-refcount.c WARN_ON_ONCE(__ref_is_percpu(ref, &percpu_count)); ref 399 lib/percpu-refcount.c ref->percpu_count_ptr &= ~__PERCPU_REF_DEAD; ref 400 lib/percpu-refcount.c percpu_ref_get(ref); ref 401 lib/percpu-refcount.c __percpu_ref_switch_mode(ref, NULL); ref 1028 mm/backing-dev.c static void release_bdi(struct kref *ref) ref 1031 mm/backing-dev.c container_of(ref, struct backing_dev_info, refcnt); ref 723 mm/hugetlb.c void resv_map_release(struct kref *ref) ref 725 mm/hugetlb.c struct resv_map *resv_map = container_of(ref, struct resv_map, refs); ref 4993 mm/memcontrol.c refcount_add(n, &memcg->id.ref); ref 4998 mm/memcontrol.c if (refcount_sub_and_test(n, &memcg->id.ref)) { ref 5244 mm/memcontrol.c refcount_set(&memcg->id.ref, 1); ref 7009 mm/memcontrol.c while (!refcount_inc_not_zero(&memcg->id.ref)) { ref 84 mm/memremap.c percpu_ref_kill(pgmap->ref); ref 93 mm/memremap.c percpu_ref_exit(pgmap->ref); ref 99 mm/memremap.c if (pgmap->ref == &pgmap->internal_ref) ref 100 mm/memremap.c pgmap->ref = NULL; ref 144 mm/memremap.c static void dev_pagemap_percpu_release(struct percpu_ref *ref) ref 147 mm/memremap.c container_of(ref, struct dev_pagemap, internal_ref); ref 198 mm/memremap.c if (!pgmap->ref) { ref 207 mm/memremap.c pgmap->ref = &pgmap->internal_ref; ref 306 mm/memremap.c percpu_ref_get_many(pgmap->ref, pfn_end(pgmap) - pfn_first(pgmap)); ref 404 mm/memremap.c if (pgmap && !percpu_ref_tryget_live(pgmap->ref)) ref 462 mm/z3fold.c release_z3fold_page(struct kref *ref) ref 464 mm/z3fold.c struct z3fold_header *zhdr = container_of(ref, struct z3fold_header, ref 469 mm/z3fold.c static void release_z3fold_page_locked(struct kref *ref) ref 471 mm/z3fold.c struct z3fold_header *zhdr = container_of(ref, struct z3fold_header, ref 477 mm/z3fold.c static void release_z3fold_page_locked_list(struct kref *ref) ref 479 mm/z3fold.c struct z3fold_header *zhdr = container_of(ref, struct z3fold_header, ref 377 net/9p/client.c static void p9_req_free(struct kref *ref) ref 379 net/9p/client.c struct p9_req_t *r = container_of(ref, struct p9_req_t, refcount); ref 62 net/9p/trans_xen.c grant_ref_t ref; ref 299 net/9p/trans_xen.c grant_ref_t ref; ref 301 net/9p/trans_xen.c ref = priv->rings[i].intf->ref[j]; ref 302 net/9p/trans_xen.c gnttab_end_foreign_access(ref, 0, 0); ref 308 net/9p/trans_xen.c 
gnttab_end_foreign_access(priv->rings[i].ref, 0, 0); ref 343 net/9p/trans_xen.c ring->ref = ret; ref 355 net/9p/trans_xen.c ring->intf->ref[i] = ret; ref 375 net/9p/trans_xen.c gnttab_end_foreign_access(ring->intf->ref[i], 0, 0); ref 380 net/9p/trans_xen.c gnttab_end_foreign_access(ring->ref, 0, 0); ref 449 net/9p/trans_xen.c priv->rings[i].ref); ref 148 net/batman-adv/bridge_loop_avoidance.c static void batadv_backbone_gw_release(struct kref *ref) ref 152 net/batman-adv/bridge_loop_avoidance.c backbone_gw = container_of(ref, struct batadv_bla_backbone_gw, ref 173 net/batman-adv/bridge_loop_avoidance.c static void batadv_claim_release(struct kref *ref) ref 178 net/batman-adv/bridge_loop_avoidance.c claim = container_of(ref, struct batadv_bla_claim, refcount); ref 115 net/batman-adv/distributed-arp-table.c static void batadv_dat_entry_release(struct kref *ref) ref 119 net/batman-adv/distributed-arp-table.c dat_entry = container_of(ref, struct batadv_dat_entry, refcount); ref 63 net/batman-adv/gateway_client.c static void batadv_gw_node_release(struct kref *ref) ref 67 net/batman-adv/gateway_client.c gw_node = container_of(ref, struct batadv_gw_node, refcount); ref 49 net/batman-adv/hard-interface.c void batadv_hardif_release(struct kref *ref) ref 53 net/batman-adv/hard-interface.c hard_iface = container_of(ref, struct batadv_hard_iface, refcount); ref 106 net/batman-adv/hard-interface.h void batadv_hardif_release(struct kref *ref); ref 206 net/batman-adv/network-coding.c static void batadv_nc_node_release(struct kref *ref) ref 210 net/batman-adv/network-coding.c nc_node = container_of(ref, struct batadv_nc_node, refcount); ref 231 net/batman-adv/network-coding.c static void batadv_nc_path_release(struct kref *ref) ref 235 net/batman-adv/network-coding.c nc_path = container_of(ref, struct batadv_nc_path, refcount); ref 181 net/batman-adv/originator.c static void batadv_orig_node_vlan_release(struct kref *ref) ref 185 net/batman-adv/originator.c orig_vlan = container_of(ref, struct batadv_orig_node_vlan, refcount); ref 235 net/batman-adv/originator.c static void batadv_neigh_ifinfo_release(struct kref *ref) ref 239 net/batman-adv/originator.c neigh_ifinfo = container_of(ref, struct batadv_neigh_ifinfo, refcount); ref 262 net/batman-adv/originator.c static void batadv_hardif_neigh_release(struct kref *ref) ref 266 net/batman-adv/originator.c hardif_neigh = container_of(ref, struct batadv_hardif_neigh_node, ref 292 net/batman-adv/originator.c static void batadv_neigh_node_release(struct kref *ref) ref 298 net/batman-adv/originator.c neigh_node = container_of(ref, struct batadv_neigh_node, refcount); ref 854 net/batman-adv/originator.c static void batadv_orig_ifinfo_release(struct kref *ref) ref 859 net/batman-adv/originator.c orig_ifinfo = container_of(ref, struct batadv_orig_ifinfo, refcount); ref 905 net/batman-adv/originator.c static void batadv_orig_node_release(struct kref *ref) ref 914 net/batman-adv/originator.c orig_node = container_of(ref, struct batadv_orig_node, refcount); ref 513 net/batman-adv/soft-interface.c static void batadv_softif_vlan_release(struct kref *ref) ref 517 net/batman-adv/soft-interface.c vlan = container_of(ref, struct batadv_softif_vlan, refcount); ref 333 net/batman-adv/tp_meter.c static void batadv_tp_vars_release(struct kref *ref) ref 338 net/batman-adv/tp_meter.c tp_vars = container_of(ref, struct batadv_tp_vars, refcount); ref 231 net/batman-adv/translation-table.c static void batadv_tt_local_entry_release(struct kref *ref) ref 235 
net/batman-adv/translation-table.c tt_local_entry = container_of(ref, struct batadv_tt_local_entry, ref 274 net/batman-adv/translation-table.c static void batadv_tt_global_entry_release(struct kref *ref) ref 278 net/batman-adv/translation-table.c tt_global_entry = container_of(ref, struct batadv_tt_global_entry, ref 437 net/batman-adv/translation-table.c static void batadv_tt_orig_list_entry_release(struct kref *ref) ref 441 net/batman-adv/translation-table.c orig_entry = container_of(ref, struct batadv_tt_orig_list_entry, ref 2803 net/batman-adv/translation-table.c static void batadv_tt_req_node_release(struct kref *ref) ref 2807 net/batman-adv/translation-table.c tt_req_node = container_of(ref, struct batadv_tt_req_node, refcount); ref 38 net/batman-adv/tvlv.c static void batadv_tvlv_handler_release(struct kref *ref) ref 42 net/batman-adv/tvlv.c tvlv_handler = container_of(ref, struct batadv_tvlv_handler, refcount); ref 94 net/batman-adv/tvlv.c static void batadv_tvlv_container_release(struct kref *ref) ref 98 net/batman-adv/tvlv.c tvlv = container_of(ref, struct batadv_tvlv_container, refcount); ref 929 net/bluetooth/hidp/core.c kref_init(&session->ref); ref 976 net/bluetooth/hidp/core.c kref_get(&session->ref); ref 980 net/bluetooth/hidp/core.c static void session_free(struct kref *ref) ref 982 net/bluetooth/hidp/core.c struct hidp_session *session = container_of(ref, struct hidp_session, ref 983 net/bluetooth/hidp/core.c ref); ref 997 net/bluetooth/hidp/core.c kref_put(&session->ref, session_free); ref 139 net/bluetooth/hidp/hidp.h struct kref ref; ref 1755 net/bluetooth/l2cap_core.c static void l2cap_conn_free(struct kref *ref) ref 1757 net/bluetooth/l2cap_core.c struct l2cap_conn *conn = container_of(ref, struct l2cap_conn, ref); ref 1765 net/bluetooth/l2cap_core.c kref_get(&conn->ref); ref 1772 net/bluetooth/l2cap_core.c kref_put(&conn->ref, l2cap_conn_free); ref 7100 net/bluetooth/l2cap_core.c kref_init(&conn->ref); ref 88 net/ceph/string_table.c void ceph_release_string(struct kref *ref) ref 90 net/ceph/string_table.c struct ceph_string *cs = container_of(ref, struct ceph_string, kref); ref 81 net/dsa/dsa2.c static void dsa_tree_release(struct kref *ref) ref 85 net/dsa/dsa2.c dst = container_of(ref, struct dsa_switch_tree, refcount); ref 162 net/mac80211/key.h #define key_mtx_dereference(local, ref) \ ref 163 net/mac80211/key.h rcu_dereference_protected(ref, lockdep_is_held(&((local)->key_mtx))) ref 33 net/mac80211/rate.c struct rate_control_ref *ref = sta->rate_ctrl; ref 41 net/mac80211/rate.c if (!ref) ref 55 net/mac80211/rate.c ref->ops->rate_init(ref->priv, sband, &chanctx_conf->def, ista, ref 66 net/mac80211/rate.c struct rate_control_ref *ref = local->rate_ctrl; ref 70 net/mac80211/rate.c if (!ref || !test_sta_flag(sta, WLAN_STA_RATE_CONTROL)) ref 74 net/mac80211/rate.c if (ref->ops->tx_status_ext) ref 75 net/mac80211/rate.c ref->ops->tx_status_ext(ref->priv, sband, priv_sta, st); ref 77 net/mac80211/rate.c ref->ops->tx_status(ref->priv, sband, st->sta, priv_sta, st->skb); ref 88 net/mac80211/rate.c struct rate_control_ref *ref = local->rate_ctrl; ref 93 net/mac80211/rate.c if (ref && ref->ops->rate_update) { ref 103 net/mac80211/rate.c ref->ops->rate_update(ref->priv, sband, &chanctx_conf->def, ref 210 net/mac80211/rate.c struct rate_control_ref *ref = file->private_data; ref 211 net/mac80211/rate.c int len = strlen(ref->ops->name); ref 214 net/mac80211/rate.c ref->ops->name, len); ref 227 net/mac80211/rate.c struct rate_control_ref *ref; ref 229 net/mac80211/rate.c ref = 
kmalloc(sizeof(struct rate_control_ref), GFP_KERNEL); ref 230 net/mac80211/rate.c if (!ref) ref 232 net/mac80211/rate.c ref->ops = ieee80211_rate_control_ops_get(name); ref 233 net/mac80211/rate.c if (!ref->ops) ref 236 net/mac80211/rate.c ref->priv = ref->ops->alloc(&local->hw); ref 237 net/mac80211/rate.c if (!ref->priv) ref 239 net/mac80211/rate.c return ref; ref 242 net/mac80211/rate.c kfree(ref); ref 875 net/mac80211/rate.c struct rate_control_ref *ref = sdata->local->rate_ctrl; ref 900 net/mac80211/rate.c ref->ops->get_rate(ref->priv, ista, priv_sta, txrc); ref 948 net/mac80211/rate.c struct rate_control_ref *ref; ref 961 net/mac80211/rate.c ref = rate_control_alloc(name, local); ref 962 net/mac80211/rate.c if (!ref) { ref 969 net/mac80211/rate.c local->rate_ctrl = ref; ref 972 net/mac80211/rate.c ref->ops->name); ref 979 net/mac80211/rate.c struct rate_control_ref *ref; ref 981 net/mac80211/rate.c ref = local->rate_ctrl; ref 983 net/mac80211/rate.c if (!ref) ref 987 net/mac80211/rate.c rate_control_free(local, ref); ref 37 net/mac80211/rate.h static inline void *rate_control_alloc_sta(struct rate_control_ref *ref, ref 41 net/mac80211/rate.h return ref->ops->alloc_sta(ref->priv, &sta->sta, gfp); ref 46 net/mac80211/rate.h struct rate_control_ref *ref = sta->rate_ctrl; ref 50 net/mac80211/rate.h ref->ops->free_sta(ref->priv, ista, priv_sta); ref 56 net/mac80211/rate.h struct rate_control_ref *ref = sta->rate_ctrl; ref 57 net/mac80211/rate.h if (ref && sta->debugfs_dir && ref->ops->add_sta_debugfs) ref 58 net/mac80211/rate.h ref->ops->add_sta_debugfs(ref->priv, sta->rate_ctrl_priv, ref 2447 net/mac80211/sta_info.c struct rate_control_ref *ref = NULL; ref 2451 net/mac80211/sta_info.c ref = local->rate_ctrl; ref 2454 net/mac80211/sta_info.c if (ref && ref->ops->get_expected_throughput) ref 2455 net/mac80211/sta_info.c thr = ref->ops->get_expected_throughput(sta->rate_ctrl_priv); ref 124 net/mac802154/llsec.c kref_init(&key->ref); ref 162 net/mac802154/llsec.c static void llsec_key_release(struct kref *ref) ref 167 net/mac802154/llsec.c key = container_of(ref, struct mac802154_llsec_key, ref); ref 179 net/mac802154/llsec.c kref_get(&key->ref); ref 185 net/mac802154/llsec.c kref_put(&key->ref, llsec_key_release); ref 26 net/mac802154/llsec.h struct kref ref; ref 102 net/netfilter/ipset/ip_set_bitmap_gen.h nla_put_net32(skb, IPSET_ATTR_REFERENCES, htonl(set->ref)) || ref 515 net/netfilter/ipset/ip_set_core.c set->ref++; ref 523 net/netfilter/ipset/ip_set_core.c BUG_ON(set->ref == 0); ref 524 net/netfilter/ipset/ip_set_core.c set->ref--; ref 1071 net/netfilter/ipset/ip_set_core.c if (s && (s->ref || s->ref_netlink)) { ref 1093 net/netfilter/ipset/ip_set_core.c } else if (s->ref || s->ref_netlink) { ref 1181 net/netfilter/ipset/ip_set_core.c if (set->ref != 0 || set->ref_netlink != 0) { ref 1254 net/netfilter/ipset/ip_set_core.c swap(from->ref, to->ref); ref 107 net/netfilter/ipset/ip_set_hash_gen.h atomic_t ref; /* References for resizing */ ref 614 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) { ref 698 net/netfilter/ipset/ip_set_hash_gen.h atomic_set(&orig->ref, 1); ref 814 net/netfilter/ipset/ip_set_hash_gen.h atomic_set(&orig->ref, 0); ref 1008 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_read(&t->ref) && ext->target) { ref 1035 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) { ref 1096 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_read(&t->ref) && ext->target) { ref 1148 
net/netfilter/ipset/ip_set_hash_gen.h if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) { ref 1308 net/netfilter/ipset/ip_set_hash_gen.h if (nla_put_net32(skb, IPSET_ATTR_REFERENCES, htonl(set->ref)) || ref 1336 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) { ref 469 net/netfilter/ipset/ip_set_list_set.c nla_put_net32(skb, IPSET_ATTR_REFERENCES, htonl(set->ref)) || ref 55 net/nfc/llcp.h struct kref ref; ref 148 net/nfc/llcp_core.c kref_get(&local->ref); ref 167 net/nfc/llcp_core.c static void local_release(struct kref *ref) ref 171 net/nfc/llcp_core.c local = container_of(ref, struct nfc_llcp_local, ref); ref 183 net/nfc/llcp_core.c return kref_put(&local->ref, local_release); ref 1578 net/nfc/llcp_core.c kref_init(&local->ref); ref 123 net/qrtr/qrtr.c struct kref ref; ref 145 net/qrtr/qrtr.c struct qrtr_node *node = container_of(kref, struct qrtr_node, ref); ref 162 net/qrtr/qrtr.c kref_get(&node->ref); ref 171 net/qrtr/qrtr.c kref_put_mutex(&node->ref, __qrtr_node_release, &qrtr_node_lock); ref 430 net/qrtr/qrtr.c kref_init(&node->ref); ref 748 net/sched/act_api.c tcf_action_dump_old(struct sk_buff *skb, struct tc_action *a, int bind, int ref) ref 750 net/sched/act_api.c return a->ops->dump(skb, a, bind, ref); ref 754 net/sched/act_api.c tcf_action_dump_1(struct sk_buff *skb, struct tc_action *a, int bind, int ref) ref 779 net/sched/act_api.c err = tcf_action_dump_old(skb, a, bind, ref); ref 792 net/sched/act_api.c int bind, int ref) ref 803 net/sched/act_api.c err = tcf_action_dump_1(skb, a, bind, ref); ref 1040 net/sched/act_api.c int ref) ref 1059 net/sched/act_api.c if (tcf_action_dump(skb, actions, bind, ref) < 0) ref 135 net/sched/act_bpf.c int bind, int ref) ref 141 net/sched/act_bpf.c .refcnt = refcount_read(&prog->tcf_refcnt) - ref, ref 170 net/sched/act_connmark.c int bind, int ref) ref 176 net/sched/act_connmark.c .refcnt = refcount_read(&ci->tcf_refcnt) - ref, ref 633 net/sched/act_csum.c int ref) ref 640 net/sched/act_csum.c .refcnt = refcount_read(&p->tcf_refcnt) - ref, ref 830 net/sched/act_ct.c int bind, int ref) ref 838 net/sched/act_ct.c .refcnt = refcount_read(&c->tcf_refcnt) - ref, ref 283 net/sched/act_ctinfo.c int bind, int ref) ref 288 net/sched/act_ctinfo.c .refcnt = refcount_read(&ci->tcf_refcnt) - ref, ref 193 net/sched/act_gact.c int bind, int ref) ref 199 net/sched/act_gact.c .refcnt = refcount_read(&gact->tcf_refcnt) - ref, ref 619 net/sched/act_ife.c int ref) ref 626 net/sched/act_ife.c .refcnt = refcount_read(&ife->tcf_refcnt) - ref, ref 276 net/sched/act_ipt.c int ref) ref 295 net/sched/act_ipt.c c.refcnt = refcount_read(&ipt->tcf_refcnt) - ref; ref 329 net/sched/act_mirred.c int ref) ref 335 net/sched/act_mirred.c .refcnt = refcount_read(&m->tcf_refcnt) - ref, ref 298 net/sched/act_mpls.c int bind, int ref) ref 305 net/sched/act_mpls.c .refcnt = refcount_read(&m->tcf_refcnt) - ref, ref 263 net/sched/act_nat.c int bind, int ref) ref 269 net/sched/act_nat.c .refcnt = refcount_read(&p->tcf_refcnt) - ref, ref 412 net/sched/act_pedit.c int bind, int ref) ref 434 net/sched/act_pedit.c opt->refcnt = refcount_read(&p->tcf_refcnt) - ref; ref 305 net/sched/act_police.c int bind, int ref) ref 312 net/sched/act_police.c .refcnt = refcount_read(&police->tcf_refcnt) - ref, ref 200 net/sched/act_sample.c int bind, int ref) ref 206 net/sched/act_sample.c .refcnt = refcount_read(&s->tcf_refcnt) - ref, ref 171 net/sched/act_simple.c int bind, int ref) ref 177 net/sched/act_simple.c .refcnt = 
refcount_read(&d->tcf_refcnt) - ref, ref 229 net/sched/act_skbedit.c int bind, int ref) ref 236 net/sched/act_skbedit.c .refcnt = refcount_read(&d->tcf_refcnt) - ref, ref 215 net/sched/act_skbmod.c int bind, int ref) ref 222 net/sched/act_skbmod.c .refcnt = refcount_read(&d->tcf_refcnt) - ref, ref 513 net/sched/act_tunnel_key.c int bind, int ref) ref 520 net/sched/act_tunnel_key.c .refcnt = refcount_read(&t->tcf_refcnt) - ref, ref 253 net/sched/act_vlan.c int bind, int ref) ref 260 net/sched/act_vlan.c .refcnt = refcount_read(&v->tcf_refcnt) - ref, ref 186 net/sched/ematch.c u32 ref; ref 188 net/sched/ematch.c if (data_len < sizeof(ref)) ref 190 net/sched/ematch.c ref = *(u32 *) data; ref 192 net/sched/ematch.c if (ref >= tree_hdr->nmatches) ref 198 net/sched/ematch.c if (ref <= idx) ref 202 net/sched/ematch.c em->data = ref; ref 54 net/sched/sch_atm.c int ref; /* reference count */ ref 133 net/sched/sch_atm.c flow->ref++; ref 149 net/sched/sch_atm.c if (--flow->ref) ref 308 net/sched/sch_atm.c flow->ref = 1; ref 337 net/sched/sch_atm.c if (flow->ref < 2) { ref 338 net/sched/sch_atm.c pr_err("atm_tc_delete: flow->ref == %d\n", flow->ref); ref 341 net/sched/sch_atm.c if (flow->ref > 2) ref 565 net/sched/sch_atm.c p->link.ref = 1; ref 593 net/sched/sch_atm.c if (flow->ref > 1) ref 594 net/sched/sch_atm.c pr_err("atm_destroy: %p->ref = %d\n", flow, flow->ref); ref 102 net/sunrpc/auth_gss/svcauth_gss.c static void rsi_put(struct kref *ref) ref 104 net/sunrpc/auth_gss/svcauth_gss.c struct rsi *rsii = container_of(ref, struct rsi, h.ref); ref 365 net/sunrpc/auth_gss/svcauth_gss.c static void rsc_put(struct kref *ref) ref 367 net/sunrpc/auth_gss/svcauth_gss.c struct rsc *rsci = container_of(ref, struct rsc, h.ref); ref 813 net/sunrpc/auth_gss/svcauth_gss.c kref_init(&new->h.ref); ref 48 net/sunrpc/cache.c kref_init(&h->ref); ref 1409 net/sunrpc/cache.c kref_read(&cp->ref), cp->flags); ref 149 net/sunrpc/svcauth.c struct auth_domain *dom = container_of(kref, struct auth_domain, ref); ref 158 net/sunrpc/svcauth.c kref_put_lock(&dom->ref, auth_domain_release, &auth_domain_lock); ref 174 net/sunrpc/svcauth.c kref_get(&hp->ref); ref 196 net/sunrpc/svcauth.c if (!kref_get_unless_zero(&hp->ref)) ref 76 net/sunrpc/svcauth_unix.c kref_init(&new->h.ref); ref 106 net/sunrpc/svcauth_unix.c struct cache_head *item = container_of(kref, struct cache_head, ref); ref 139 net/sunrpc/svcauth_unix.c kref_get(&item->m_client->h.ref); ref 433 net/sunrpc/svcauth_unix.c struct cache_head *item = container_of(kref, struct cache_head, ref); ref 712 net/sunrpc/svcauth_unix.c kref_get(&rqstp->rq_client->ref); ref 674 net/tipc/group.c evt.port.ref = m->port; ref 858 net/tipc/group.c u32 port = evt->port.ref; ref 126 net/tipc/name_table.h u32 node, u32 ref, u32 key); ref 688 net/tipc/socket.c addr->addr.id.ref = tsk_peer_port(tsk); ref 691 net/tipc/socket.c addr->addr.id.ref = tsk->portid; ref 901 net/tipc/socket.c port = dest->addr.id.ref; ref 1116 net/tipc/socket.c tipc_dest_pop(&dsts, &dest->addr.id.node, &dest->addr.id.ref); ref 1390 net/tipc/socket.c msg_set_destport(hdr, dest->addr.id.ref); ref 1564 net/tipc/socket.c srcaddr->sock.addr.id.ref = msg_origport(hdr); ref 3147 net/tipc/socket.c tsk1->peer.addr.id.ref = tsk2->portid; ref 3152 net/tipc/socket.c tsk2->peer.addr.id.ref = tsk1->portid; ref 52 net/tipc/subscr.c tipc_evt_write(evt, port.ref, port); ref 859 scripts/asn1_compiler.c struct type **ref; ref 1012 scripts/asn1_compiler.c ref = bsearch(cursor, type_index, nr_types, sizeof(type_index[0]), ref 1014 
scripts/asn1_compiler.c if (!ref) {
ref 1019 scripts/asn1_compiler.c cursor->type = *ref;
ref 1020 scripts/asn1_compiler.c (*ref)->ref_count++;
ref 457 scripts/dtc/checks.c check_duplicate_label(c, dti, m->ref, node, prop, m);
ref 483 scripts/dtc/checks.c if (node != get_node_by_ref(root, m->ref))
ref 593 scripts/dtc/checks.c refnode = get_node_by_ref(dt, m->ref);
ref 597 scripts/dtc/checks.c "label \"%s\"\n", m->ref);
ref 628 scripts/dtc/checks.c refnode = get_node_by_ref(dt, m->ref);
ref 631 scripts/dtc/checks.c m->ref);
ref 15 scripts/dtc/data.c free(m->ref);
ref 227 scripts/dtc/data.c struct data data_add_marker(struct data d, enum markertype type, char *ref)
ref 234 scripts/dtc/data.c m->ref = ref;
ref 78 scripts/dtc/dtc.h char *ref;
ref 119 scripts/dtc/dtc.h struct data data_add_marker(struct data d, enum markertype type, char *ref);
ref 210 scripts/dtc/dtc.h struct node *add_orphan_node(struct node *old_node, struct node *new_node, char *ref);
ref 234 scripts/dtc/dtc.h struct node *get_node_by_ref(struct node *tree, const char *ref);
ref 156 scripts/dtc/flattree.c emit_offset_label(f, m->ref, m->offset);
ref 228 scripts/dtc/livetree.c struct node * add_orphan_node(struct node *dt, struct node *new_node, char *ref)
ref 236 scripts/dtc/livetree.c if (ref[0] == '/') {
ref 237 scripts/dtc/livetree.c d = data_add_marker(d, TYPE_STRING, ref);
ref 238 scripts/dtc/livetree.c d = data_append_data(d, ref, strlen(ref) + 1);
ref 242 scripts/dtc/livetree.c d = data_add_marker(d, REF_PHANDLE, ref);
ref 486 scripts/dtc/livetree.c if (streq(m->ref, label))
ref 583 scripts/dtc/livetree.c struct node *get_node_by_ref(struct node *tree, const char *ref)
ref 585 scripts/dtc/livetree.c if (streq(ref, "/"))
ref 587 scripts/dtc/livetree.c else if (ref[0] == '/')
ref 588 scripts/dtc/livetree.c return get_node_by_path(tree, ref);
ref 590 scripts/dtc/livetree.c return get_node_by_label(tree, ref);
ref 874 scripts/dtc/livetree.c if (!get_node_by_ref(dti->dt, m->ref))
ref 902 scripts/dtc/livetree.c append_to_property(fn, m->ref, entry, strlen(entry) + 1, TYPE_STRING);
ref 920 scripts/dtc/livetree.c refnode = get_node_by_ref(dt, m->ref);
ref 939 scripts/dtc/livetree.c if (get_node_by_ref(dti->dt, m->ref))
ref 1001 scripts/dtc/livetree.c refnode = get_node_by_ref(dt, m->ref);
ref 224 scripts/dtc/treesource.c dummy_marker.ref = NULL;
ref 237 scripts/dtc/treesource.c fprintf(f, " %s:", m->ref);
ref 934 scripts/gcc-plugins/gcc-common.h static inline cgraph_node_ptr ipa_ref_referring_node(struct ipa_ref *ref)
ref 936 scripts/gcc-plugins/gcc-common.h return dyn_cast<cgraph_node_ptr>(ref->referring);
ref 772 sound/aoa/fabrics/layout.c const u32 *ref;
ref 780 sound/aoa/fabrics/layout.c ref = of_get_property(ldev->sound, propname, NULL);
ref 781 sound/aoa/fabrics/layout.c if (!ref) {
ref 786 sound/aoa/fabrics/layout.c if (*ref != codec->node->phandle) {
ref 122 sound/core/sound.c struct snd_card *ref = snd_card_ref(card);
ref 123 sound/core/sound.c if (!ref)
ref 126 sound/core/sound.c snd_card_unref(ref);
ref 1173 sound/pci/mixart/mixart.c u32 ref;
ref 1191 sound/pci/mixart/mixart.c ref = readl_be( MIXART_MEM( chip->mgr, MIXART_PSEUDOREG_PERF_SYSTEM_LOAD_OFFSET));
ref 1193 sound/pci/mixart/mixart.c if (ref) {
ref 1194 sound/pci/mixart/mixart.c u32 mailbox = 100 * readl_be( MIXART_MEM( chip->mgr, MIXART_PSEUDOREG_PERF_MAILBX_LOAD_OFFSET)) / ref;
ref 1195 sound/pci/mixart/mixart.c u32 streaming = 100 * readl_be( MIXART_MEM( chip->mgr, MIXART_PSEUDOREG_PERF_STREAM_LOAD_OFFSET)) / ref;
ref 1196 sound/pci/mixart/mixart.c u32 interr = 100 * readl_be( MIXART_MEM( chip->mgr, MIXART_PSEUDOREG_PERF_INTERR_LOAD_OFFSET)) / ref;
ref 1261 sound/pci/pcxhr/pcxhr.c int ref = rmh.stat[1];
ref 1262 sound/pci/pcxhr/pcxhr.c if (ref > 0) {
ref 1265 sound/pci/pcxhr/pcxhr.c ref = (ref * 48000) /
ref 1269 sound/pci/pcxhr/pcxhr.c ref *= 2;
ref 1271 sound/pci/pcxhr/pcxhr.c cur = 100 - (100 * cur) / ref;
ref 1281 sound/soc/codecs/arizona.c int ref, div, refclk;
ref 1301 sound/soc/codecs/arizona.c for (ref = 0; ref < ARRAY_SIZE(arizona_opclk_ref_48k_rates) &&
ref 1302 sound/soc/codecs/arizona.c rates[ref] <= refclk; ref++) {
ref 1304 sound/soc/codecs/arizona.c while (rates[ref] / div >= freq && div < 32) {
ref 1305 sound/soc/codecs/arizona.c if (rates[ref] / div == freq) {
ref 1313 sound/soc/codecs/arizona.c ref);
ref 2464 sound/soc/codecs/madera.c int ref, div, refclk;
ref 2487 sound/soc/codecs/madera.c for (ref = 0; ref < ARRAY_SIZE(madera_opclk_ref_48k_rates); ++ref) {
ref 2488 sound/soc/codecs/madera.c if (rates[ref] > refclk)
ref 2492 sound/soc/codecs/madera.c while ((rates[ref] / div >= freq) && (div <= 30)) {
ref 2493 sound/soc/codecs/madera.c if (rates[ref] / div == freq) {
ref 2497 sound/soc/codecs/madera.c val = (div << MADERA_OPCLK_DIV_SHIFT) | ref;
ref 1191 sound/soc/codecs/rt5668.c int ref, val, reg, idx = -EINVAL;
ref 1198 sound/soc/codecs/rt5668.c ref = 256 * rt5668->lrck[RT5668_AIF2];
ref 1200 sound/soc/codecs/rt5668.c ref = 256 * rt5668->lrck[RT5668_AIF1];
ref 1202 sound/soc/codecs/rt5668.c idx = rt5668_div_sel(rt5668, ref, div, ARRAY_SIZE(div));
ref 1217 sound/soc/codecs/rt5682.c int ref, val, reg, idx = -EINVAL;
ref 1225 sound/soc/codecs/rt5682.c ref = 256 * rt5682->lrck[RT5682_AIF2];
ref 1227 sound/soc/codecs/rt5682.c ref = 256 * rt5682->lrck[RT5682_AIF1];
ref 1229 sound/soc/codecs/rt5682.c idx = rt5682_div_sel(rt5682, ref, div_f, ARRAY_SIZE(div_f));
ref 112 sound/soc/qcom/qdsp6/q6adm.c static void q6adm_free_copp(struct kref *ref)
ref 114 sound/soc/qcom/qdsp6/q6adm.c struct q6copp *c = container_of(ref, struct q6copp, refcount);
ref 712 sound/soc/qcom/qdsp6/q6afe.c static void q6afe_port_free(struct kref *ref)
ref 718 sound/soc/qcom/qdsp6/q6afe.c port = container_of(ref, struct q6afe_port, refcount);
ref 470 sound/soc/qcom/qdsp6/q6asm.c static void q6asm_audio_client_release(struct kref *ref)
ref 476 sound/soc/qcom/qdsp6/q6asm.c ac = container_of(ref, struct audio_client, refcount);
ref 124 tools/bpf/bpftool/main.c const char *ref;
ref 130 tools/bpf/bpftool/main.c while ((ref = va_arg(ap, const char *))) {
ref 131 tools/bpf/bpftool/main.c if (!is_prefix(arg, ref))
ref 136 tools/bpf/bpftool/main.c strncat(msg, ref, sizeof(msg) - strlen(msg) - 1);
ref 2221 tools/lib/traceevent/event-parse.c char *ref;
ref 2233 tools/lib/traceevent/event-parse.c ref = malloc(len);
ref 2234 tools/lib/traceevent/event-parse.c if (!ref) {
ref 2238 tools/lib/traceevent/event-parse.c memcpy(ref, type, len);
ref 2241 tools/lib/traceevent/event-parse.c ref[len - 2] = 0;
ref 2243 tools/lib/traceevent/event-parse.c val = eval_type_str(val, ref, 0);
ref 2244 tools/lib/traceevent/event-parse.c free(ref);
ref 995 tools/perf/arch/x86/util/intel-pt.c struct intel_pt_snapshot_ref *ref = &ptr->snapshot_refs[idx];
ref 998 tools/perf/arch/x86/util/intel-pt.c wrapped = intel_pt_compare_ref(ref->ref_buf, ref->ref_offset,
ref 1002 tools/perf/arch/x86/util/intel-pt.c intel_pt_copy_ref(ref->ref_buf, ptr->snapshot_ref_buf_size, mm->len,
ref 1237 tools/perf/util/auxtrace.c u64 head, old = mm->prev, offset, ref;
ref 1274 tools/perf/util/auxtrace.c ref = auxtrace_record__reference(itr);
ref 1318 tools/perf/util/auxtrace.c ev.auxtrace.reference = ref;
ref 506 tools/perf/util/bpf-loader.c ref1 = arg1->ref;
ref 507 tools/perf/util/bpf-loader.c ref2 = arg2->ref;
ref 249 tools/perf/util/bpf-prologue.c struct probe_trace_arg_ref *ref = NULL;
ref 278 tools/perf/util/bpf-prologue.c ref = arg->ref;
ref 279 tools/perf/util/bpf-prologue.c while (ref) {
ref 281 tools/perf/util/bpf-prologue.c i, ref->offset);
ref 283 tools/perf/util/bpf-prologue.c ref->offset);
ref 289 tools/perf/util/bpf-prologue.c ref = ref->next;
ref 295 tools/perf/util/bpf-prologue.c if (ref)
ref 303 tools/perf/util/bpf-prologue.c int insn_sz = (args[i].ref) ? argtype_to_ldx_size(args[i].type) : BPF_DW;
ref 398 tools/perf/util/bpf-prologue.c struct probe_trace_arg_ref *ref = args[i].ref;
ref 403 tools/perf/util/bpf-prologue.c args[i].value, ref ? ref->offset : 0);
ref 407 tools/perf/util/bpf-prologue.c while (ref) {
ref 420 tools/perf/util/bpf-prologue.c if (ref->offset > OFFSET_MAX ||
ref 421 tools/perf/util/bpf-prologue.c ref->offset < OFFSET_MIN) {
ref 423 tools/perf/util/bpf-prologue.c ref->offset);
ref 427 tools/perf/util/bpf-prologue.c ref = ref->next;
ref 2638 tools/perf/util/hist.c char ref[30] = " show reference callgraph, ";
ref 2676 tools/perf/util/hist.c ev_name, sample_freq_str, enable_ref ? ref : " ", nr_events);
ref 1612 tools/perf/util/probe-event.c (*fieldp)->ref = true;
ref 1624 tools/perf/util/probe-event.c (*fieldp)->ref = false;
ref 1627 tools/perf/util/probe-event.c (*fieldp)->ref = true;
ref 1641 tools/perf/util/probe-event.c pr_debug("%s(%d), ", (*fieldp)->name, (*fieldp)->ref);
ref 1650 tools/perf/util/probe-event.c pr_debug("%s(%d)\n", (*fieldp)->name, (*fieldp)->ref);
ref 1882 tools/perf/util/probe-event.c err = strbuf_addf(&buf, "%s%s", field->ref ? "->" : ".",
ref 1970 tools/perf/util/probe-event.c static int __synthesize_probe_trace_arg_ref(struct probe_trace_arg_ref *ref,
ref 1974 tools/perf/util/probe-event.c if (ref->next) {
ref 1975 tools/perf/util/probe-event.c depth = __synthesize_probe_trace_arg_ref(ref->next, buf,
ref 1980 tools/perf/util/probe-event.c err = strbuf_addf(buf, "%+ld(", ref->offset);
ref 1987 tools/perf/util/probe-event.c struct probe_trace_arg_ref *ref = arg->ref;
ref 1999 tools/perf/util/probe-event.c if (arg->value[0] == '@' && arg->ref)
ref 2000 tools/perf/util/probe-event.c ref = ref->next;
ref 2003 tools/perf/util/probe-event.c if (ref) {
ref 2004 tools/perf/util/probe-event.c depth = __synthesize_probe_trace_arg_ref(ref, buf, 1);
ref 2010 tools/perf/util/probe-event.c if (arg->value[0] == '@' && arg->ref)
ref 2011 tools/perf/util/probe-event.c err = strbuf_addf(buf, "%s%+ld", arg->value, arg->ref->offset);
ref 2274 tools/perf/util/probe-event.c (*ppfield)->ref = field->ref;
ref 2313 tools/perf/util/probe-event.c struct probe_trace_arg_ref *ref, *next;
ref 2325 tools/perf/util/probe-event.c ref = tev->args[i].ref;
ref 2326 tools/perf/util/probe-event.c while (ref) {
ref 2327 tools/perf/util/probe-event.c next = ref->next;
ref 2328 tools/perf/util/probe-event.c free(ref);
ref 2329 tools/perf/util/probe-event.c ref = next;
ref 48 tools/perf/util/probe-event.h struct probe_trace_arg_ref *ref; /* Referencing offset */
ref 77 tools/perf/util/probe-event.h bool ref; /* Referencing flag */
ref 149 tools/perf/util/probe-finder.c struct probe_trace_arg_ref *ref;
ref 150 tools/perf/util/probe-finder.c ref = zalloc(sizeof(struct probe_trace_arg_ref));
ref 151 tools/perf/util/probe-finder.c if (ref != NULL)
ref 152 tools/perf/util/probe-finder.c ref->offset = offs;
ref 153 tools/perf/util/probe-finder.c return ref;
ref 173 tools/perf/util/probe-finder.c bool ref = false;
ref 225 tools/perf/util/probe-finder.c tvar->ref = alloc_trace_arg_ref((long)offs);
ref 226 tools/perf/util/probe-finder.c if (tvar->ref == NULL)
ref 235 tools/perf/util/probe-finder.c ref = true;
ref 243 tools/perf/util/probe-finder.c ref = true;
ref 249 tools/perf/util/probe-finder.c ref = true;
ref 272 tools/perf/util/probe-finder.c if (ref) {
ref 273 tools/perf/util/probe-finder.c tvar->ref = alloc_trace_arg_ref((long)offs);
ref 274 tools/perf/util/probe-finder.c if (tvar->ref == NULL)
ref 286 tools/perf/util/probe-finder.c struct probe_trace_arg_ref **ref_ptr = &tvar->ref;
ref 405 tools/perf/util/probe-finder.c struct probe_trace_arg_ref *ref = *ref_ptr;
ref 431 tools/perf/util/probe-finder.c ref = zalloc(sizeof(struct probe_trace_arg_ref));
ref 432 tools/perf/util/probe-finder.c if (ref == NULL)
ref 435 tools/perf/util/probe-finder.c (*ref_ptr)->next = ref;
ref 437 tools/perf/util/probe-finder.c *ref_ptr = ref;
ref 439 tools/perf/util/probe-finder.c ref->offset += dwarf_bytesize(&type) * field->index;
ref 440 tools/perf/util/probe-finder.c ref->user_access = user_access;
ref 444 tools/perf/util/probe-finder.c if (!field->ref) {
ref 462 tools/perf/util/probe-finder.c ref = zalloc(sizeof(struct probe_trace_arg_ref));
ref 463 tools/perf/util/probe-finder.c if (ref == NULL)
ref 466 tools/perf/util/probe-finder.c (*ref_ptr)->next = ref;
ref 468 tools/perf/util/probe-finder.c *ref_ptr = ref;
ref 482 tools/perf/util/probe-finder.c if (field->ref && dwarf_diename(vr_die)) {
ref 487 tools/perf/util/probe-finder.c if (!ref) {
ref 511 tools/perf/util/probe-finder.c ref->offset += (long)offs;
ref 512 tools/perf/util/probe-finder.c ref->user_access = user_access;
ref 517 tools/perf/util/probe-finder.c &ref, die_mem, user_access);
ref 523 tools/perf/util/probe-finder.c field->next, &ref, die_mem, user_access);
ref 548 tools/perf/util/probe-finder.c pf->pvar->field, &pf->tvar->ref,
ref 2239 tools/perf/util/session.c struct ref_reloc_sym *ref;
ref 2242 tools/perf/util/session.c ref = zalloc(sizeof(struct ref_reloc_sym));
ref 2243 tools/perf/util/session.c if (ref == NULL)
ref 2246 tools/perf/util/session.c ref->name = strdup(symbol_name);
ref 2247 tools/perf/util/session.c if (ref->name == NULL) {
ref 2248 tools/perf/util/session.c free(ref);
ref 2252 tools/perf/util/session.c bracket = strchr(ref->name, ']');
ref 2256 tools/perf/util/session.c ref->addr = addr;
ref 2260 tools/perf/util/session.c kmap->ref_reloc_sym = ref;
ref 56 tools/perf/util/thread-stack.c u64 ref;
ref 277 tools/perf/util/thread-stack.c u64 timestamp, u64 ref, bool no_return)
ref 296 tools/perf/util/thread-stack.c cr.call_ref = tse->ref;
ref 297 tools/perf/util/thread-stack.c cr.return_ref = ref;
ref 530 tools/perf/util/thread-stack.c u64 timestamp, u64 ref, struct call_path *cp,
ref 548 tools/perf/util/thread-stack.c tse->ref = ref;
ref 562 tools/perf/util/thread-stack.c u64 ret_addr, u64 timestamp, u64 ref,
ref 575 tools/perf/util/thread-stack.c timestamp, ref, false);
ref 581 tools/perf/util/thread-stack.c timestamp, ref, false);
ref 593 tools/perf/util/thread-stack.c timestamp, ref,
ref 599 tools/perf/util/thread-stack.c timestamp, ref, false);
ref 609 tools/perf/util/thread-stack.c struct addr_location *to_al, u64 ref)
ref 629 tools/perf/util/thread-stack.c return thread_stack__push_cp(ts, ip, sample->time, ref, cp,
ref 634 tools/perf/util/thread-stack.c struct perf_sample *sample, u64 ref)
ref 642 tools/perf/util/thread-stack.c tm, ref, true);
ref 654 tools/perf/util/thread-stack.c struct addr_location *to_al, u64 ref)
ref 669 tools/perf/util/thread-stack.c err = thread_stack__pop_ks(thread, ts, sample, ref);
ref 676 tools/perf/util/thread-stack.c return thread_stack__push_cp(ts, 0, tm, ref, cp, true,
ref 681 tools/perf/util/thread-stack.c err = thread_stack__pop_ks(thread, ts, sample, ref);
ref 699 tools/perf/util/thread-stack.c tm, ref, false);
ref 707 tools/perf/util/thread-stack.c return thread_stack__push_cp(ts, addr, tm, ref, cp,
ref 717 tools/perf/util/thread-stack.c err = thread_stack__push_cp(ts, 0, tm, ref, cp, true, false);
ref 731 tools/perf/util/thread-stack.c err = thread_stack__push_cp(ts, addr, tm, ref, cp, true, false);
ref 737 tools/perf/util/thread-stack.c err = thread_stack__push_cp(ts, ip, tm, ref, cp, true, false);
ref 741 tools/perf/util/thread-stack.c return thread_stack__call_return(thread, ts, --ts->cnt, tm, ref, false);
ref 746 tools/perf/util/thread-stack.c u64 ref)
ref 758 tools/perf/util/thread-stack.c timestamp, ref, false);
ref 767 tools/perf/util/thread-stack.c struct perf_sample *sample, u64 ref)
ref 774 tools/perf/util/thread-stack.c if (!ts->cnt || (ts->cnt == 1 && ts->stack[0].ref == ref))
ref 782 tools/perf/util/thread-stack.c return thread_stack__push_cp(ts, ret_addr, sample->time, ref, cp,
ref 853 tools/perf/util/thread-stack.c struct addr_location *to_al, u64 ref,
ref 887 tools/perf/util/thread-stack.c err = thread_stack__bottom(ts, sample, from_al, to_al, ref);
ref 913 tools/perf/util/thread-stack.c err = thread_stack__push_cp(ts, ret_addr, sample->time, ref,
ref 934 tools/perf/util/thread-stack.c return thread_stack__pop_ks(thread, ts, sample, ref);
ref 946 tools/perf/util/thread-stack.c sample->time, ref, from_al->sym);
ref 951 tools/perf/util/thread-stack.c from_al, to_al, ref);
ref 954 tools/perf/util/thread-stack.c err = thread_stack__trace_begin(thread, ts, sample->time, ref);
ref 956 tools/perf/util/thread-stack.c err = thread_stack__trace_end(ts, sample, ref);
ref 972 tools/perf/util/thread-stack.c err = thread_stack__push_cp(ts, 0, sample->time, ref, cp, false,
ref 99 tools/perf/util/thread-stack.h struct addr_location *to_al, u64 ref,
ref 103 tools/testing/nvdimm/test/iomap.c WARN_ON(!pgmap || !pgmap->ref);
ref 108 tools/testing/nvdimm/test/iomap.c percpu_ref_kill(pgmap->ref);
ref 114 tools/testing/nvdimm/test/iomap.c percpu_ref_exit(pgmap->ref);
ref 118 tools/testing/nvdimm/test/iomap.c static void dev_pagemap_percpu_release(struct percpu_ref *ref)
ref 121 tools/testing/nvdimm/test/iomap.c container_of(ref, struct dev_pagemap, internal_ref);
ref 135 tools/testing/nvdimm/test/iomap.c if (!pgmap->ref) {
ref 144 tools/testing/nvdimm/test/iomap.c pgmap->ref = &pgmap->internal_ref;
ref 53 tools/vm/slabinfo.c char *ref;
ref 1084 tools/vm/slabinfo.c n1 = a1->ref;
ref 1085 tools/vm/slabinfo.c n2 = a2->ref;
ref 1106 tools/vm/slabinfo.c if (strcmp(a->ref, s->name) == 0) {
ref 1112 tools/vm/slabinfo.c fatal("Unresolved alias %s\n", a->ref);
ref 1205 tools/vm/slabinfo.c alias->ref = strdup(p);
ref 118 virt/kvm/arm/vgic/vgic.c static void vgic_irq_release(struct kref *ref)