Searched refs:refs (Results 1 – 68 of 68) sorted by relevance

/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/core/
event.c 31 if (--event->refs[index * event->types_nr + type] == 0) { in nvkm_event_put()
44 if (++event->refs[index * event->types_nr + type] == 1) { in nvkm_event_get()
58 if (!event->refs || WARN_ON(index >= event->index_nr)) in nvkm_event_send()
77 if (event->refs) { in nvkm_event_fini()
78 kfree(event->refs); in nvkm_event_fini()
79 event->refs = NULL; in nvkm_event_fini()
87 event->refs = kzalloc(sizeof(*event->refs) * index_nr * types_nr, in nvkm_event_init()
89 if (!event->refs) in nvkm_event_init()
notify.c 139 if ((notify->event = event), event->refs) { in nvkm_notify_init()
/linux-4.1.27/drivers/staging/lustre/lustre/libcfs/
libcfs_lock.c 152 cfs_percpt_atomic_free(atomic_t **refs) in cfs_percpt_atomic_free() argument
154 cfs_percpt_free(refs); in cfs_percpt_atomic_free()
162 atomic_t **refs; in cfs_percpt_atomic_alloc() local
166 refs = cfs_percpt_alloc(cptab, sizeof(*ref)); in cfs_percpt_atomic_alloc()
167 if (refs == NULL) in cfs_percpt_atomic_alloc()
170 cfs_percpt_for_each(ref, i, refs) in cfs_percpt_atomic_alloc()
172 return refs; in cfs_percpt_atomic_alloc()
178 cfs_percpt_atomic_summary(atomic_t **refs) in cfs_percpt_atomic_summary() argument
184 cfs_percpt_for_each(ref, i, refs) in cfs_percpt_atomic_summary()
/linux-4.1.27/arch/s390/mm/
gup.c 56 int refs; in gup_huge_pmd() local
64 refs = 0; in gup_huge_pmd()
73 refs++; in gup_huge_pmd()
76 if (!page_cache_add_speculative(head, refs)) { in gup_huge_pmd()
77 *nr -= refs; in gup_huge_pmd()
82 *nr -= refs; in gup_huge_pmd()
83 while (refs--) in gup_huge_pmd()
92 while (refs--) { in gup_huge_pmd()
/linux-4.1.27/arch/sparc/mm/
gup.c 74 int refs; in gup_huge_pmd() local
82 refs = 0; in gup_huge_pmd()
91 refs++; in gup_huge_pmd()
94 if (!page_cache_add_speculative(head, refs)) { in gup_huge_pmd()
95 *nr -= refs; in gup_huge_pmd()
100 *nr -= refs; in gup_huge_pmd()
101 while (refs--) in gup_huge_pmd()
109 while (refs--) { in gup_huge_pmd()
/linux-4.1.27/mm/
gup.c 1077 int refs; in gup_huge_pmd() local
1082 refs = 0; in gup_huge_pmd()
1091 refs++; in gup_huge_pmd()
1094 if (!page_cache_add_speculative(head, refs)) { in gup_huge_pmd()
1095 *nr -= refs; in gup_huge_pmd()
1100 *nr -= refs; in gup_huge_pmd()
1101 while (refs--) in gup_huge_pmd()
1111 while (refs--) { in gup_huge_pmd()
1124 int refs; in gup_huge_pud() local
1129 refs = 0; in gup_huge_pud()
[all …]
hugetlb.c 488 kref_init(&resv_map->refs); in resv_map_alloc()
497 struct resv_map *resv_map = container_of(ref, struct resv_map, refs); in resv_map_release()
2554 kref_get(&resv->refs); in hugetlb_vm_op_open()
2573 kref_put(&resv->refs, resv_map_release); in hugetlb_vm_op_close()
3646 kref_put(&resv_map->refs, resv_map_release); in hugetlb_reserve_pages()
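
The hugetlb.c hits above follow the kernel's kref pattern: struct resv_map embeds a struct kref named refs, resv_map_alloc() starts it at one with kref_init(), the vm_ops open/close hooks take and drop references with kref_get()/kref_put(), and the release callback (resv_map_release) recovers the containing object with container_of(). Below is a minimal, compilable userspace sketch of that shape using C11 atomics; kref_like, resv_like and friends are invented names for illustration, not the kernel's kref implementation.

    /* Userspace sketch of the kref_init/kref_get/kref_put shape used by
     * resv_map above. Hypothetical names; build with: cc -std=c11 kref_sketch.c */
    #include <stdatomic.h>
    #include <stddef.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct kref_like { atomic_int refcount; };

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    static void kref_like_init(struct kref_like *k)
    {
        atomic_init(&k->refcount, 1);      /* creator holds the first reference */
    }

    static void kref_like_get(struct kref_like *k)
    {
        atomic_fetch_add(&k->refcount, 1);
    }

    /* Drop one reference; run 'release' when the count reaches zero. */
    static void kref_like_put(struct kref_like *k, void (*release)(struct kref_like *))
    {
        if (atomic_fetch_sub(&k->refcount, 1) == 1)
            release(k);
    }

    struct resv_like {
        long reserved_pages;
        struct kref_like refs;             /* embedded, like resv_map->refs */
    };

    static void resv_like_release(struct kref_like *ref)
    {
        struct resv_like *r = container_of(ref, struct resv_like, refs);
        printf("releasing map with %ld reserved pages\n", r->reserved_pages);
        free(r);
    }

    int main(void)
    {
        struct resv_like *r = malloc(sizeof(*r));
        r->reserved_pages = 42;
        kref_like_init(&r->refs);                    /* cf. resv_map_alloc() */
        kref_like_get(&r->refs);                     /* cf. hugetlb_vm_op_open() */
        kref_like_put(&r->refs, resv_like_release);  /* cf. hugetlb_vm_op_close() */
        kref_like_put(&r->refs, resv_like_release);  /* final put runs the release */
        return 0;
    }
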
/linux-4.1.27/arch/mips/mm/
gup.c 76 int refs; in gup_huge_pmd() local
84 refs = 0; in gup_huge_pmd()
94 refs++; in gup_huge_pmd()
97 get_head_page_multiple(head, refs); in gup_huge_pmd()
142 int refs; in gup_huge_pud() local
150 refs = 0; in gup_huge_pud()
160 refs++; in gup_huge_pud()
163 get_head_page_multiple(head, refs); in gup_huge_pud()
/linux-4.1.27/arch/x86/mm/
gup.c 123 int refs; in gup_huge_pmd() local
134 refs = 0; in gup_huge_pmd()
144 refs++; in gup_huge_pmd()
146 get_head_page_multiple(head, refs); in gup_huge_pmd()
200 int refs; in gup_huge_pud() local
211 refs = 0; in gup_huge_pud()
221 refs++; in gup_huge_pud()
223 get_head_page_multiple(head, refs); in gup_huge_pud()
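
The gup_huge_pmd()/gup_huge_pud() hits above (s390, sparc, mm/, mips, x86, plus the powerpc gup_hugepte() further down) share one shape: walk the tail pages of a huge page, record and count them in a local refs, then take all of the references on the head page in one go; on s390/sparc/mm, *nr is rolled back when the speculative grab (page_cache_add_speculative()) fails. Here is a small, compilable userspace sketch of that "count locally, commit atomically, roll back on failure" idea using C11 atomics; head_page, add_speculative() and gup_like() are invented names, not the kernel API.

    /* Illustrative userspace sketch of the pattern in the gup_huge_pmd() hits
     * above. Hypothetical names; build with: cc -std=c11 gup_sketch.c */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct head_page {
        atomic_int refcount;               /* stands in for the compound head's count */
    };

    /* Mimic the idea of page_cache_add_speculative(): take 'refs' references
     * at once, but refuse if the object has already dropped to zero. */
    static bool add_speculative(struct head_page *head, int refs)
    {
        int old = atomic_load(&head->refcount);
        while (old > 0) {
            if (atomic_compare_exchange_weak(&head->refcount, &old, old + refs))
                return true;
        }
        return false;                      /* caller must undo its bookkeeping */
    }

    static int gup_like(struct head_page *head, int tail_pages, int *nr)
    {
        int refs = 0;
        for (int i = 0; i < tail_pages; i++) {
            (*nr)++;                       /* record each page (elided here) ... */
            refs++;                        /* ... and count it locally */
        }
        if (!add_speculative(head, refs)) {
            *nr -= refs;                   /* commit failed: roll back the count */
            return 0;
        }
        return 1;
    }

    int main(void)
    {
        struct head_page head = { .refcount = 1 };
        int nr = 0;
        int ok = gup_like(&head, 8, &nr);
        printf("pinned=%d nr=%d refcount=%d\n", ok, nr, atomic_load(&head.refcount));
        return 0;
    }
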
/linux-4.1.27/fs/btrfs/
delayed-inode.c 56 atomic_set(&delayed_node->refs, 0); in btrfs_init_delayed_node()
96 atomic_inc(&node->refs); in btrfs_get_delayed_node()
104 atomic_inc(&node->refs); /* can be accessed */ in btrfs_get_delayed_node()
111 atomic_add(2, &node->refs); in btrfs_get_delayed_node()
141 atomic_add(2, &node->refs); in btrfs_get_or_create_delayed_node()
182 atomic_inc(&node->refs); /* inserted into list */ in btrfs_queue_delayed_node()
196 atomic_dec(&node->refs); /* not in the list */ in btrfs_dequeue_delayed_node()
217 atomic_inc(&node->refs); in btrfs_first_delayed_node()
244 atomic_inc(&next->refs); in btrfs_next_delayed_node()
269 if (atomic_dec_and_test(&delayed_node->refs)) { in __btrfs_release_delayed_node()
[all …]
delayed-ref.h 40 atomic_t refs; member
184 WARN_ON(atomic_read(&ref->refs) == 0); in btrfs_put_delayed_ref()
185 if (atomic_dec_and_test(&ref->refs)) { in btrfs_put_delayed_ref()
extent_map.c 58 atomic_set(&em->refs, 1); in alloc_extent_map()
74 WARN_ON(atomic_read(&em->refs) == 0); in free_extent_map()
75 if (atomic_dec_and_test(&em->refs)) { in free_extent_map()
325 atomic_inc(&em->refs); in setup_extent_mapping()
384 atomic_inc(&em->refs); in __lookup_extent_mapping()
delayed-inode.h 70 atomic_t refs; member
83 atomic_t refs; member
ordered-data.c 216 atomic_set(&entry->refs, 1); in __btrfs_add_ordered_extent()
363 atomic_inc(&entry->refs); in btrfs_dec_test_first_ordered_pending()
427 atomic_inc(&entry->refs); in btrfs_dec_test_ordered_pending()
458 atomic_inc(&ordered->refs); in btrfs_get_logged_extents()
547 if (atomic_dec_and_test(&entry->refs)) { in btrfs_put_ordered_extent()
626 atomic_inc(&ordered->refs); in btrfs_wait_ordered_extents()
800 atomic_inc(&entry->refs); in btrfs_lookup_ordered_extent()
842 atomic_inc(&entry->refs); in btrfs_lookup_ordered_range()
865 atomic_inc(&entry->refs); in btrfs_lookup_first_ordered_extent()
extent_io.c 70 atomic_read(&state->refs)); in btrfs_leak_debug_check()
79 eb->start, eb->len, atomic_read(&eb->refs)); in btrfs_leak_debug_check()
219 atomic_set(&state->refs, 1); in alloc_extent_state()
229 if (atomic_dec_and_test(&state->refs)) { in free_extent_state()
620 atomic_dec(&cached->refs); in clear_extent_bit()
771 atomic_inc(&state->refs); in wait_extent_bit()
811 atomic_inc(&state->refs); in cache_state_if_flags()
1561 atomic_inc(&state->refs); in find_delalloc_range()
2884 atomic_inc(&em->refs); in __get_extent_map()
2895 atomic_inc(&em->refs); in __get_extent_map()
[all …]
disk-io.h 103 if (atomic_inc_not_zero(&root->refs)) in btrfs_grab_fs_root()
110 if (atomic_dec_and_test(&root->refs)) in btrfs_put_fs_root()
extent-tree.c 745 u64 offset, int metadata, u64 *refs, u64 *flags) in btrfs_lookup_extent_info() argument
837 atomic_inc(&head->node.refs); in btrfs_lookup_extent_info()
864 if (refs) in btrfs_lookup_extent_info()
865 *refs = num_refs; in btrfs_lookup_extent_info()
993 u64 refs; in convert_extent_item_v0() local
1002 refs = btrfs_extent_refs_v0(leaf, ei0); in convert_extent_item_v0()
1042 btrfs_set_extent_refs(leaf, item, refs); in convert_extent_item_v0()
1675 u64 refs; in setup_inline_extent_backref() local
1689 refs = btrfs_extent_refs(leaf, ei); in setup_inline_extent_backref()
1690 refs += refs_to_add; in setup_inline_extent_backref()
[all …]
extent_map.h 36 atomic_t refs; member
extent_io.h 110 atomic_t refs; member
129 atomic_t refs; member
289 atomic_inc(&eb->refs); in extent_buffer_get()
scrub.c 67 atomic_t refs; member
82 atomic_t refs; member
115 atomic_t refs; /* free mem on transition to zero */ member
144 atomic_t refs; member
204 atomic_t refs; member
314 atomic_inc(&sctx->refs); in scrub_pending_bio_inc()
356 atomic_inc(&sctx->refs); in scrub_pending_trans_workers_inc()
447 if (atomic_dec_and_test(&sctx->refs)) in scrub_put_ctx()
475 atomic_set(&sctx->refs, 1); in scrub_setup_ctx()
869 atomic_inc(&recover->refs); in scrub_get_recover()
[all …]
raid56.c 151 atomic_t refs; member
391 atomic_dec(&rbio->refs); in __remove_rbio_from_cache()
482 atomic_inc(&rbio->refs); in cache_rbio()
678 atomic_dec(&cur->refs); in lock_stripe_add()
727 atomic_inc(&rbio->refs); in lock_stripe_add()
773 atomic_dec(&rbio->refs); in unlock_stripe()
790 atomic_inc(&next->refs); in unlock_stripe()
825 WARN_ON(atomic_read(&rbio->refs) < 0); in __free_raid_bio()
826 if (!atomic_dec_and_test(&rbio->refs)) in __free_raid_bio()
977 atomic_set(&rbio->refs, 1); in alloc_rbio()
delayed-ref.c 247 atomic_inc(&head->node.refs); in btrfs_delayed_ref_lock()
608 atomic_set(&ref->refs, 1); in add_delayed_ref_head()
675 atomic_set(&ref->refs, 1); in add_delayed_tree_ref()
738 atomic_set(&ref->refs, 1); in add_delayed_data_ref()
backref.c 889 u64 time_seq, struct ulist *refs, argument
950 atomic_inc(&head->node.refs);
1052 ret = ulist_add_merge_ptr(refs, ref->parent,
1628 struct ulist *refs = NULL; local
1649 tree_mod_seq_elem.seq, &refs,
1655 while (!ret && (ref_node = ulist_next(refs, &ref_uiter))) {
1674 free_leaf_list(refs);
ordered-data.h 115 atomic_t refs; member
ctree.h 621 __le64 refs; member
627 __le32 refs; member
774 __le32 refs; member
1917 atomic_t refs; member
2526 BTRFS_SETGET_FUNCS(extent_refs, struct btrfs_extent_item, refs, 64);
2531 BTRFS_SETGET_FUNCS(extent_refs_v0, struct btrfs_extent_item_v0, refs, 32);
2882 BTRFS_SETGET_FUNCS(disk_root_refs, struct btrfs_root_item, refs, 32);
2891 BTRFS_SETGET_STACK_FUNCS(root_refs, struct btrfs_root_item, refs, 32);
3362 u64 offset, int metadata, u64 *refs, u64 *flags);
volumes.h 298 atomic_t refs; member
ctree.c 162 if (atomic_inc_not_zero(&eb->refs)) { in btrfs_root_node()
983 u64 refs; in update_ref_for_cow() local
1009 &refs, &flags); in update_ref_for_cow()
1012 if (refs == 0) { in update_ref_for_cow()
1018 refs = 1; in update_ref_for_cow()
1030 if (refs > 1) { in update_ref_for_cow()
volumes.c 4467 atomic_inc(&em->refs); in __btrfs_alloc_chunk()
4923 atomic_set(&bbio->refs, 1); in alloc_btrfs_bio()
4930 WARN_ON(!atomic_read(&bbio->refs)); in btrfs_get_bbio()
4931 atomic_inc(&bbio->refs); in btrfs_get_bbio()
4938 if (atomic_dec_and_test(&bbio->refs)) in btrfs_put_bbio()
disk-io.c 1055 BUG_ON(!atomic_read(&eb->refs)); in btree_set_page_dirty()
1260 atomic_set(&root->refs, 1); in __setup_root()
4099 atomic_inc(&head->node.refs); in btrfs_destroy_delayed_refs()
send.c 3780 struct fs_path *name, void *ctx, struct list_head *refs) in record_ref() argument
3803 ret = __record_ref(refs, dir, gen, p); in record_ref()
tree-log.c 4060 atomic_inc(&em->refs); in btrfs_log_changed_extents()
inode.c 5046 atomic_inc(&state->refs); in evict_inode_truncate_pages()
/linux-4.1.27/drivers/staging/lustre/lnet/lnet/
lib-eq.c 156 int **refs = NULL; in LNetEQFree() local
190 refs = eq->eq_refs; in LNetEQFree()
201 if (refs != NULL) in LNetEQFree()
202 cfs_percpt_free(refs); in LNetEQFree()
/linux-4.1.27/fs/btrfs/tests/
qgroup-tests.c 93 u64 refs; in add_tree_ref() local
118 refs = btrfs_extent_refs(path->nodes[0], item); in add_tree_ref()
119 btrfs_set_extent_refs(path->nodes[0], item, refs + 1); in add_tree_ref()
177 u64 refs; in remove_extent_ref() local
202 refs = btrfs_extent_refs(path->nodes[0], item); in remove_extent_ref()
203 btrfs_set_extent_refs(path->nodes[0], item, refs - 1); in remove_extent_ref()
/linux-4.1.27/net/netfilter/
nf_conntrack_ecache.c 42 struct nf_conn *refs[16]; in ecache_work_evict_list() local
63 refs[evicted] = ct; in ecache_work_evict_list()
65 if (++evicted >= ARRAY_SIZE(refs)) { in ecache_work_evict_list()
75 nf_ct_put(refs[--evicted]); in ecache_work_evict_list()
/linux-4.1.27/drivers/misc/sgi-xp/
xpc.h 956 s32 refs = atomic_dec_return(&ch->references); in xpc_msgqueue_deref() local
958 DBUG_ON(refs < 0); in xpc_msgqueue_deref()
959 if (refs == 0) in xpc_msgqueue_deref()
973 s32 refs = atomic_dec_return(&part->references); in xpc_part_deref() local
975 DBUG_ON(refs < 0); in xpc_part_deref()
976 if (refs == 0 && part->setup_state == XPC_P_SS_WTEARDOWN) in xpc_part_deref()
/linux-4.1.27/drivers/net/ethernet/mellanox/mlx4/
port.c 63 table->refs[i] = 0; in mlx4_init_mac_table()
76 table->refs[i] = 0; in mlx4_init_vlan_table()
110 if (table->refs[i] && in find_index()
149 if (!table->refs[i]) in mlx4_find_cached_mac()
174 if (!table->refs[i]) { in __mlx4_register_mac()
184 ++table->refs[i]; in __mlx4_register_mac()
207 table->refs[free] = 1; in __mlx4_register_mac()
270 if (--table->refs[index]) { in __mlx4_unregister_mac()
364 if (table->refs[i] && in mlx4_find_cached_vlan()
393 if (free < 0 && (table->refs[i] == 0)) { in __mlx4_register_vlan()
[all …]
mlx4.h 727 int refs[MLX4_MAX_MAC_NUM]; member
749 int refs[MLX4_MAX_VLAN_NUM]; member
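
The mlx4 port.c hits above show a different use of refs: a per-port table where refs[i] counts how many users registered the MAC/VLAN held in slot i, so __mlx4_register_mac() either bumps the count of an existing entry or claims the first free slot with refs = 1, and __mlx4_unregister_mac() only releases the slot once the count drops to zero. A compilable userspace sketch of that slot-table idea follows; addr_table, table_register() and table_unregister() are illustrative names, not the driver's API.

    /* Userspace sketch of a reference-counted slot table like the mlx4 MAC/VLAN
     * tables above. Hypothetical names; build with: cc -std=c11 table_sketch.c */
    #include <stdio.h>

    #define TABLE_SIZE 8

    struct addr_table {
        unsigned long entries[TABLE_SIZE];
        int refs[TABLE_SIZE];              /* 0 means "slot unused" */
    };

    /* Register an address: return its slot index, or -1 if the table is full. */
    static int table_register(struct addr_table *t, unsigned long addr)
    {
        int free = -1;
        for (int i = 0; i < TABLE_SIZE; i++) {
            if (t->refs[i] && t->entries[i] == addr) {
                ++t->refs[i];              /* already present: share the slot */
                return i;
            }
            if (free < 0 && !t->refs[i])
                free = i;                  /* remember the first free slot */
        }
        if (free < 0)
            return -1;
        t->entries[free] = addr;
        t->refs[free] = 1;
        return free;
    }

    static void table_unregister(struct addr_table *t, int index)
    {
        if (--t->refs[index])
            return;                        /* still in use by another caller */
        t->entries[index] = 0;             /* last user gone: release the slot */
    }

    int main(void)
    {
        struct addr_table t = { 0 };
        int a = table_register(&t, 0xdeadbeef);
        int b = table_register(&t, 0xdeadbeef);   /* same address: refs becomes 2 */
        printf("slot=%d refs=%d\n", a, t.refs[a]);
        table_unregister(&t, b);
        table_unregister(&t, a);
        printf("after release refs=%d\n", t.refs[a]);
        return 0;
    }
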
/linux-4.1.27/arch/powerpc/mm/
hugetlbpage.c 1059 int refs; in gup_hugepte() local
1076 refs = 0; in gup_hugepte()
1086 refs++; in gup_hugepte()
1089 if (!page_cache_add_speculative(head, refs)) { in gup_hugepte()
1090 *nr -= refs; in gup_hugepte()
1096 *nr -= refs; in gup_hugepte()
1097 while (refs--) in gup_hugepte()
1106 while (refs--) { in gup_hugepte()
/linux-4.1.27/include/trace/events/
btrfs.h 185 __field( int, refs )
197 __entry->refs = atomic_read(&map->refs);
212 __entry->refs, __entry->compress_type)
244 __field( int, refs )
257 __entry->refs = atomic_read(&ordered->refs);
274 __entry->compress_type, __entry->refs)
693 __field( int, refs )
702 __entry->refs = atomic_read(&buf->refs);
711 __entry->refs,
/linux-4.1.27/drivers/gpu/drm/nouveau/include/nvkm/core/
event.h 16 int *refs; member
/linux-4.1.27/include/uapi/xen/
gntdev.h 60 struct ioctl_gntdev_grant_ref refs[1]; member
/linux-4.1.27/Documentation/devicetree/bindings/hwmon/
g762.txt 25 at http://natisbad.org/NAS/refs/GMT_EDS-762_763-080710-0.2.pdf.
/linux-4.1.27/drivers/staging/lustre/include/linux/libcfs/
libcfs_private.h 393 void cfs_percpt_atomic_free(atomic_t **refs);
395 int cfs_percpt_atomic_summary(atomic_t **refs);
/linux-4.1.27/include/linux/
rmap.h 255 #define try_to_unmap(page, refs) SWAP_FAIL argument
hugetlb.h 35 struct kref refs; member
/linux-4.1.27/kernel/
audit_tree.c 29 atomic_long_t refs; member
122 if (atomic_long_dec_and_test(&chunk->refs)) in audit_put_chunk()
152 atomic_long_set(&chunk->refs, 1); in alloc_chunk()
193 atomic_long_inc(&p->refs); in audit_tree_lookup()
/linux-4.1.27/fs/ncpfs/
sock.c 61 atomic_t refs; member
83 atomic_set(&req->refs, (1)); in ncp_alloc_req()
91 atomic_inc(&req->refs); in ncp_req_get()
96 if (atomic_dec_and_test(&req->refs)) in ncp_req_put()
/linux-4.1.27/tools/vm/
slabinfo.c 30 int refs; member
1066 s->refs++; in link_slabs()
1084 if (!show_single_ref && a->slab->refs == 1) in alias()
1114 if (s->refs > 1 && !show_first_alias) in rename_slabs()
1171 slab->refs = 0; in read_slab_dir()
/linux-4.1.27/drivers/net/wireless/iwlwifi/mvm/
debugfs.c 1333 if (mvm->refs[ref]) \
1336 BIT(ref), mvm->refs[ref], #ref); \
1347 u32 refs = 0; in iwl_dbgfs_d0i3_refs_read() local
1350 if (mvm->refs[i]) in iwl_dbgfs_d0i3_refs_read()
1351 refs |= BIT(i); in iwl_dbgfs_d0i3_refs_read()
1354 refs); in iwl_dbgfs_d0i3_refs_read()
1395 taken = mvm->refs[IWL_MVM_REF_USER]; in iwl_dbgfs_d0i3_refs_write()
mac80211.c 220 mvm->refs[ref_type]++; in iwl_mvm_ref()
232 WARN_ON(!mvm->refs[ref_type]--); in iwl_mvm_unref()
247 if (except_ref == i || !mvm->refs[i]) in iwl_mvm_unref_all_except()
251 i, mvm->refs[i]); in iwl_mvm_unref_all_except()
252 for (j = 0; j < mvm->refs[i]; j++) in iwl_mvm_unref_all_except()
254 mvm->refs[i] = 0; in iwl_mvm_unref_all_except()
269 if (mvm->refs[i]) { in iwl_mvm_ref_taken()
ops.c 584 mvm->refs[IWL_MVM_REF_UCODE_DOWN] = 1; in iwl_op_mode_mvm_start()
mvm.h 716 u8 refs[IWL_MVM_REF_COUNT]; member
/linux-4.1.27/drivers/android/
binder.c 234 struct hlist_head refs; member
973 if (node->local_weak_refs || !hlist_empty(&node->refs)) in binder_dec_node()
982 if (hlist_empty(&node->refs) && !node->local_strong_refs && in binder_dec_node()
1076 hlist_add_head(&new_ref->node_entry, &node->refs); in binder_get_ref_for_node()
2270 int weak = !hlist_empty(&node->refs) || node->local_weak_refs || strong; in binder_thread_read()
3019 static int binder_node_release(struct binder_node *node, int refs) in binder_node_release() argument
3027 if (hlist_empty(&node->refs)) { in binder_node_release()
3031 return refs; in binder_node_release()
3039 hlist_for_each_entry(ref, &node->refs, node_entry) { in binder_node_release()
3040 refs++; in binder_node_release()
[all …]
/linux-4.1.27/drivers/staging/lustre/lustre/obdclass/
obd_config.c 725 int refs; in class_decref() local
729 refs = atomic_read(&obd->obd_refcount); in class_decref()
733 CDEBUG(D_INFO, "Decref %s (%p) now %d\n", obd->obd_name, obd, refs); in class_decref()
735 if ((refs == 1) && obd->obd_stopping) { in class_decref()
748 if (refs == 0) { in class_decref()
/linux-4.1.27/tools/perf/util/
parse-events.l 222 refs|Reference|ops|access |
/linux-4.1.27/drivers/tty/
tty_io.c 606 int refs = 0; in tty_signal_session_leader() local
617 refs++; in tty_signal_session_leader()
642 return refs; in tty_signal_session_leader()
673 int refs; in __tty_hangup() local
712 refs = tty_signal_session_leader(tty, exit_session); in __tty_hangup()
714 while (refs--) in __tty_hangup()
/linux-4.1.27/fs/ocfs2/dlm/
dlmcommon.h 297 struct kref refs; member
877 kref_get(&res->refs); in dlm_lockres_get()
dlmdebug.c 109 res->last_used, atomic_read(&res->refs.refcount), in __dlm_print_one_lock_resource()
544 atomic_read(&res->refs.refcount)); in dump_lockres()
dlmmaster.c 491 res = container_of(kref, struct dlm_lock_resource, refs); in dlm_lockres_release()
552 kref_put(&res->refs, dlm_lockres_release); in dlm_lockres_put()
588 kref_init(&res->refs); in dlm_init_lockres()
/linux-4.1.27/fs/hugetlbfs/
inode.c 348 resv_map_release(&resv_map->refs); in hugetlbfs_evict_inode()
504 kref_put(&resv_map->refs, resv_map_release); in hugetlbfs_get_inode()
/linux-4.1.27/drivers/xen/
gntdev.c 610 if (copy_from_user(map->grants, &u->refs, in gntdev_ioctl_map_grant_ref()
/linux-4.1.27/Documentation/development-process/
7.AdvancedTopics 40 own terminology and concepts; a new user of git should know about refs,
/linux-4.1.27/Documentation/filesystems/caching/
backend-api.txt 187 backend to do the operation. The backend may get extra refs to it by
188 calling fscache_get_retrieval() and refs may be discarded by calling
/linux-4.1.27/drivers/block/drbd/
drbd_main.c 2882 int refs = 3; in drbd_delete_device() local
2890 refs++; in drbd_delete_device()
2896 kref_sub(&device->kref, refs, drbd_destroy_device); in drbd_delete_device()
/linux-4.1.27/Documentation/networking/
rxrpc.txt 797 of. It is possible to get extra refs on all types of message for later
/linux-4.1.27/Documentation/filesystems/
vfs.txt 1031 the case of failure. The vfsmount should be returned with 2 refs on
/linux-4.1.27/Documentation/virtual/uml/
UserModeLinux-HOWTO.txt 2729 nsyms = 57, ndeps = 0, syms = 0xa9023170, deps = 0x0, refs = 0x0,