
Searched refs:atomic_read (Results 1 – 200 of 1522) sorted by relevance
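A note for orientation: in this tree atomic_read() is just a tear-free load of an atomic_t counter (the asm-generic fallback listed further down expands it to READ_ONCE((v)->counter)), and most of the hits below use it for lock-free bookkeeping: reference counts, statistics, status flags. The sketch below is a hedged, self-contained model of that pattern using C11 <stdatomic.h> so it builds in userspace; atomic_t, atomic_read(), atomic_set() and atomic_inc() are re-declared locally for illustration and are not the kernel implementations.

#include <stdatomic.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's atomic_t: one plain int counter. */
typedef struct { atomic_int counter; } atomic_t;

/* Models atomic_read(): a single relaxed (tear-free) load, no ordering. */
static inline int atomic_read(const atomic_t *v)
{
        return atomic_load_explicit(&v->counter, memory_order_relaxed);
}

static inline void atomic_set(atomic_t *v, int i)
{
        atomic_store_explicit(&v->counter, i, memory_order_relaxed);
}

static inline void atomic_inc(atomic_t *v)
{
        atomic_fetch_add_explicit(&v->counter, 1, memory_order_relaxed);
}

/* Typical use seen in the hits: snapshot a refcount for a sanity check. */
int main(void)
{
        atomic_t usage;

        atomic_set(&usage, 1);
        atomic_inc(&usage);

        /* cf. ASSERTCMP(atomic_read(&op->usage), >, 0) in fs/fscache/operation.c */
        if (atomic_read(&usage) <= 0)
                fprintf(stderr, "refcount underflow\n");

        printf("usage = %d\n", atomic_read(&usage));
        return 0;
}

The value returned is only a snapshot: code that acts on it (such as the refcount assertions and "free when zero" checks in the listings) still needs a lock or a compare-and-swap to make the decision itself atomic.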


/linux-4.4.14/fs/fscache/
Dstats.c147 atomic_read(&fscache_n_cookie_index), in fscache_stats_show()
148 atomic_read(&fscache_n_cookie_data), in fscache_stats_show()
149 atomic_read(&fscache_n_cookie_special)); in fscache_stats_show()
152 atomic_read(&fscache_n_object_alloc), in fscache_stats_show()
153 atomic_read(&fscache_n_object_no_alloc), in fscache_stats_show()
154 atomic_read(&fscache_n_object_avail), in fscache_stats_show()
155 atomic_read(&fscache_n_object_dead)); in fscache_stats_show()
157 atomic_read(&fscache_n_checkaux_none), in fscache_stats_show()
158 atomic_read(&fscache_n_checkaux_okay), in fscache_stats_show()
159 atomic_read(&fscache_n_checkaux_update), in fscache_stats_show()
[all …]
Dhistogram.c41 n[0] = atomic_read(&fscache_obj_instantiate_histogram[index]); in fscache_histogram_show()
42 n[1] = atomic_read(&fscache_ops_histogram[index]); in fscache_histogram_show()
43 n[2] = atomic_read(&fscache_objs_histogram[index]); in fscache_histogram_show()
44 n[3] = atomic_read(&fscache_retrieval_delay_histogram[index]); in fscache_histogram_show()
45 n[4] = atomic_read(&fscache_retrieval_histogram[index]); in fscache_histogram_show()
Doperation.c63 op->object->debug_id, op->debug_id, atomic_read(&op->usage)); in fscache_enqueue_operation()
68 ASSERTCMP(atomic_read(&op->usage), >, 0); in fscache_enqueue_operation()
159 ASSERTCMP(atomic_read(&op->usage), >, 0); in fscache_submit_exclusive_op()
241 object->debug_id, op->debug_id, atomic_read(&op->usage)); in fscache_submit_op()
244 ASSERTCMP(atomic_read(&op->usage), >, 0); in fscache_submit_op()
362 ASSERTCMP(atomic_read(&op->usage), >, 0); in fscache_cancel_op()
484 op->object->debug_id, op->debug_id, atomic_read(&op->usage)); in fscache_put_operation()
486 ASSERTCMP(atomic_read(&op->usage), >, 0); in fscache_put_operation()
572 ASSERTCMP(atomic_read(&op->usage), ==, 0); in fscache_operation_gc()
602 op->object->debug_id, op->debug_id, atomic_read(&op->usage)); in fscache_op_work_func()
Dcookie.c519 ASSERTCMP(atomic_read(&cookie->n_active), >, 0); in __fscache_disable_cookie()
521 if (atomic_read(&cookie->n_children) != 0) { in __fscache_disable_cookie()
596 atomic_read(&cookie->n_active), retire); in __fscache_relinquish_cookie()
609 ASSERTCMP(atomic_read(&cookie->parent->usage), >, 0); in __fscache_relinquish_cookie()
610 ASSERTCMP(atomic_read(&cookie->parent->n_children), >, 0); in __fscache_relinquish_cookie()
615 ASSERTCMP(atomic_read(&cookie->usage), >, 0); in __fscache_relinquish_cookie()
641 BUG_ON(atomic_read(&cookie->usage) <= 0); in __fscache_cookie_put()
/linux-4.4.14/fs/btrfs/
Dlocking.c45 if (atomic_read(&eb->blocking_writers) == 0) { in btrfs_set_lock_blocking_rw()
46 WARN_ON(atomic_read(&eb->spinning_writers) != 1); in btrfs_set_lock_blocking_rw()
55 WARN_ON(atomic_read(&eb->spinning_readers) == 0); in btrfs_set_lock_blocking_rw()
78 BUG_ON(atomic_read(&eb->blocking_writers) != 1); in btrfs_clear_lock_blocking_rw()
80 WARN_ON(atomic_read(&eb->spinning_writers)); in btrfs_clear_lock_blocking_rw()
89 BUG_ON(atomic_read(&eb->blocking_readers) == 0); in btrfs_clear_lock_blocking_rw()
109 BUG_ON(!atomic_read(&eb->blocking_writers) && in btrfs_tree_read_lock()
113 if (atomic_read(&eb->blocking_writers) && in btrfs_tree_read_lock()
126 if (atomic_read(&eb->blocking_writers)) { in btrfs_tree_read_lock()
129 atomic_read(&eb->blocking_writers) == 0); in btrfs_tree_read_lock()
[all …]
Dreada.c768 if (atomic_read(&device->reada_in_flight) < in __reada_start_machine()
820 atomic_read(&device->reada_in_flight)); in dump_devs()
963 while (atomic_read(&rc->elems)) { in btrfs_reada_wait()
964 wait_event_timeout(rc->wait, atomic_read(&rc->elems) == 0, in btrfs_reada_wait()
967 atomic_read(&rc->elems) < 10 ? 1 : 0); in btrfs_reada_wait()
970 dump_devs(rc->root->fs_info, atomic_read(&rc->elems) < 10 ? 1 : 0); in btrfs_reada_wait()
981 while (atomic_read(&rc->elems)) { in btrfs_reada_wait()
982 wait_event(rc->wait, atomic_read(&rc->elems) == 0); in btrfs_reada_wait()
/linux-4.4.14/net/netfilter/ipvs/
Dip_vs_nq.c50 return atomic_read(&dest->activeconns) + 1; in ip_vs_nq_dest_overhead()
82 !atomic_read(&dest->weight)) in ip_vs_nq_schedule()
88 if (atomic_read(&dest->activeconns) == 0) { in ip_vs_nq_schedule()
95 ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_nq_schedule()
96 (__s64)doh * atomic_read(&least->weight))) { in ip_vs_nq_schedule()
112 atomic_read(&least->activeconns), in ip_vs_nq_schedule()
113 atomic_read(&least->refcnt), in ip_vs_nq_schedule()
114 atomic_read(&least->weight), loh); in ip_vs_nq_schedule()
Dip_vs_sed.c54 return atomic_read(&dest->activeconns) + 1; in ip_vs_sed_dest_overhead()
85 atomic_read(&dest->weight) > 0) { in ip_vs_sed_schedule()
102 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_sed_schedule()
103 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_sed_schedule()
113 atomic_read(&least->activeconns), in ip_vs_sed_schedule()
114 atomic_read(&least->refcnt), in ip_vs_sed_schedule()
115 atomic_read(&least->weight), loh); in ip_vs_sed_schedule()
Dip_vs_lblcr.c177 if ((atomic_read(&least->weight) > 0) in ip_vs_dest_set_min()
193 if (((__s64)loh * atomic_read(&dest->weight) > in ip_vs_dest_set_min()
194 (__s64)doh * atomic_read(&least->weight)) in ip_vs_dest_set_min()
206 atomic_read(&least->activeconns), in ip_vs_dest_set_min()
207 atomic_read(&least->refcnt), in ip_vs_dest_set_min()
208 atomic_read(&least->weight), loh); in ip_vs_dest_set_min()
226 if (atomic_read(&most->weight) > 0) { in ip_vs_dest_set_max()
239 if (((__s64)moh * atomic_read(&dest->weight) < in ip_vs_dest_set_max()
240 (__s64)doh * atomic_read(&most->weight)) in ip_vs_dest_set_max()
241 && (atomic_read(&dest->weight) > 0)) { in ip_vs_dest_set_max()
[all …]
Dip_vs_wlc.c57 atomic_read(&dest->weight) > 0) { in ip_vs_wlc_schedule()
74 if ((__s64)loh * atomic_read(&dest->weight) > in ip_vs_wlc_schedule()
75 (__s64)doh * atomic_read(&least->weight)) { in ip_vs_wlc_schedule()
85 atomic_read(&least->activeconns), in ip_vs_wlc_schedule()
86 atomic_read(&least->refcnt), in ip_vs_wlc_schedule()
87 atomic_read(&least->weight), loh); in ip_vs_wlc_schedule()
Dip_vs_fo.c39 atomic_read(&dest->weight) > hw) { in ip_vs_fo_schedule()
41 hw = atomic_read(&dest->weight); in ip_vs_fo_schedule()
49 atomic_read(&hweight->activeconns), in ip_vs_fo_schedule()
50 atomic_read(&hweight->weight)); in ip_vs_fo_schedule()
Dip_vs_lblc.c314 if (atomic_read(&tbl->entries) <= tbl->max_size) { in ip_vs_lblc_check_expire()
319 goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3; in ip_vs_lblc_check_expire()
422 if (atomic_read(&dest->weight) > 0) { in __ip_vs_lblc_schedule()
439 if ((__s64)loh * atomic_read(&dest->weight) > in __ip_vs_lblc_schedule()
440 (__s64)doh * atomic_read(&least->weight)) { in __ip_vs_lblc_schedule()
450 atomic_read(&least->activeconns), in __ip_vs_lblc_schedule()
451 atomic_read(&least->refcnt), in __ip_vs_lblc_schedule()
452 atomic_read(&least->weight), loh); in __ip_vs_lblc_schedule()
465 if (atomic_read(&dest->activeconns) > atomic_read(&dest->weight)) { in is_overloaded()
469 if (atomic_read(&d->activeconns)*2 in is_overloaded()
[all …]
Dip_vs_ovf.c41 w = atomic_read(&dest->weight); in ip_vs_ovf_schedule()
43 atomic_read(&dest->activeconns) > w || in ip_vs_ovf_schedule()
56 atomic_read(&h->activeconns), in ip_vs_ovf_schedule()
57 atomic_read(&h->weight)); in ip_vs_ovf_schedule()
Dip_vs_wrr.c81 weight = atomic_read(&dest->weight); in ip_vs_wrr_gcd_weight()
102 new_weight = atomic_read(&dest->weight); in ip_vs_wrr_max_weight()
186 atomic_read(&dest->weight) >= mark->cw) in ip_vs_wrr_schedule()
220 atomic_read(&dest->activeconns), in ip_vs_wrr_schedule()
221 atomic_read(&dest->refcnt), in ip_vs_wrr_schedule()
222 atomic_read(&dest->weight)); in ip_vs_wrr_schedule()
Dip_vs_rr.c76 atomic_read(&dest->weight) > 0) in ip_vs_rr_schedule()
99 atomic_read(&dest->activeconns), in ip_vs_rr_schedule()
100 atomic_read(&dest->refcnt), atomic_read(&dest->weight)); in ip_vs_rr_schedule()
Dip_vs_lc.c48 atomic_read(&dest->weight) == 0) in ip_vs_lc_schedule()
64 atomic_read(&least->activeconns), in ip_vs_lc_schedule()
65 atomic_read(&least->inactconns)); in ip_vs_lc_schedule()
Dip_vs_conn.c250 ret = atomic_read(&cp->refcnt) ? false : true; in ip_vs_conn_unlink()
301 if (!cp && atomic_read(&ip_vs_conn_no_cport_cnt)) { in ip_vs_conn_in_get()
556 return atomic_read(&dest->activeconns) in ip_vs_dest_totalconns()
557 + atomic_read(&dest->inactconns); in ip_vs_dest_totalconns()
577 conn_flags = atomic_read(&dest->conn_flags); in ip_vs_bind_dest()
603 cp->flags, atomic_read(&cp->refcnt), in ip_vs_bind_dest()
604 atomic_read(&dest->refcnt)); in ip_vs_bind_dest()
674 if (pd && atomic_read(&pd->appcnt)) in ip_vs_try_bind_dest()
700 cp->flags, atomic_read(&cp->refcnt), in ip_vs_unbind_dest()
701 atomic_read(&dest->refcnt)); in ip_vs_unbind_dest()
[all …]
Dip_vs_sh.c81 return atomic_read(&dest->weight) <= 0 || in is_unavailable()
196 atomic_read(&dest->weight)); in ip_vs_sh_reassign()
199 if (++d_count >= atomic_read(&dest->weight)) { in ip_vs_sh_reassign()
/linux-4.4.14/drivers/staging/lustre/include/linux/libcfs/
Dlibcfs_private.h223 LASSERTF(atomic_read(a) == v, \
224 "value: %d\n", atomic_read((a))); \
230 LASSERTF(atomic_read(a) != v, \
231 "value: %d\n", atomic_read((a))); \
237 LASSERTF(atomic_read(a) < v, \
238 "value: %d\n", atomic_read((a))); \
244 LASSERTF(atomic_read(a) <= v, \
245 "value: %d\n", atomic_read((a))); \
251 LASSERTF(atomic_read(a) > v, \
252 "value: %d\n", atomic_read((a))); \
[all …]
/linux-4.4.14/arch/mips/kernel/
Dsync-r4k.c61 while (atomic_read(&count_count_start) != 1) in synchronise_count_master()
78 while (atomic_read(&count_count_stop) != 1) in synchronise_count_master()
108 while (atomic_read(&count_start_flag) != cpu) in synchronise_count_slave()
112 initcount = atomic_read(&count_reference); in synchronise_count_slave()
116 while (atomic_read(&count_count_start) != 2) in synchronise_count_slave()
126 while (atomic_read(&count_count_stop) != 2) in synchronise_count_slave()
Dspinlock_test.c66 while (atomic_read(&s->enter_wait)) in multi_other()
73 while (atomic_read(&s->start_wait)) in multi_other()
85 while (atomic_read(&s->exit_wait)) in multi_other()
/linux-4.4.14/drivers/staging/wilc1000/
Dlinux_wlan_common.h54 if ((atomic_read(&DEBUG_LEVEL) & DEBUG) && \
55 ((atomic_read(&REGION)) & (region))) { \
63 if ((atomic_read(&DEBUG_LEVEL) & INFO) && \
64 ((atomic_read(&REGION)) & (region))) { \
72 if ((atomic_read(&DEBUG_LEVEL) & WRN) && \
73 ((atomic_read(&REGION)) & (region))) { \
81 if ((atomic_read(&DEBUG_LEVEL) & ERR)) { \
Dwilc_debugfs.c46 res = scnprintf(buf, sizeof(buf), "Debug Level: %x\n", atomic_read(&DEBUG_LEVEL)); in wilc_debug_level_read()
62 …x%08x) is out of range, stay previous flag (0x%08x)\n", __func__, flag, atomic_read(&DEBUG_LEVEL)); in wilc_debug_level_write()
85 res = scnprintf(buf, sizeof(buf), "Debug region: %x\n", atomic_read(&REGION)); in wilc_debug_region_read()
105 …ue (0x%08x) is out of range, stay previous flag (0x%08x)\n", __func__, flag, atomic_read(&REGION)); in wilc_debug_region_write()
110 printk("new debug-region is %x\n", atomic_read(&REGION)); in wilc_debug_region_write()
/linux-4.4.14/tools/perf/tests/
Dthread-mg-share.c46 TEST_ASSERT_EQUAL("wrong refcnt", atomic_read(&mg->refcnt), 4); in test__thread_mg_share()
74 TEST_ASSERT_EQUAL("wrong refcnt", atomic_read(&other_mg->refcnt), 2); in test__thread_mg_share()
80 TEST_ASSERT_EQUAL("wrong refcnt", atomic_read(&mg->refcnt), 3); in test__thread_mg_share()
83 TEST_ASSERT_EQUAL("wrong refcnt", atomic_read(&mg->refcnt), 2); in test__thread_mg_share()
86 TEST_ASSERT_EQUAL("wrong refcnt", atomic_read(&mg->refcnt), 1); in test__thread_mg_share()
92 TEST_ASSERT_EQUAL("wrong refcnt", atomic_read(&other_mg->refcnt), 1); in test__thread_mg_share()
Dthread-map.c24 atomic_read(&map->refcnt) == 1); in test__thread_map()
39 atomic_read(&map->refcnt) == 1); in test__thread_map()
/linux-4.4.14/sound/core/seq/
Dseq_lock.c33 if (atomic_read(lockp) < 0) { in snd_use_lock_sync_helper()
34 pr_warn("ALSA: seq_lock: lock trouble [counter = %d] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
37 while (atomic_read(lockp) > 0) { in snd_use_lock_sync_helper()
39 pr_warn("ALSA: seq_lock: timeout [%d left] in %s:%d\n", atomic_read(lockp), file, line); in snd_use_lock_sync_helper()
Dseq_memory.c37 return pool->total_elements - atomic_read(&pool->counter); in snd_seq_pool_available()
267 used = atomic_read(&pool->counter); in snd_seq_cell_alloc()
435 while (atomic_read(&pool->counter) > 0) { in snd_seq_pool_done()
437 pr_warn("ALSA: snd_seq_pool_done timeout: %d cells remain\n", atomic_read(&pool->counter)); in snd_seq_pool_done()
518 snd_iprintf(buffer, "%sCells in use : %d\n", space, atomic_read(&pool->counter)); in snd_seq_info_pool()
/linux-4.4.14/include/asm-generic/
Dqspinlock.h53 return atomic_read(&lock->val) & _Q_LOCKED_MASK; in queued_spin_is_locked()
68 return !atomic_read(&lock.val); in queued_spin_value_unlocked()
78 return atomic_read(&lock->val) & ~_Q_LOCKED_MASK; in queued_spin_is_contended()
87 if (!atomic_read(&lock->val) && in queued_spin_trylock()
135 while (atomic_read(&lock->val) & _Q_LOCKED_MASK) in queued_spin_unlock_wait()
Dqrwlock.h48 return !(atomic_read(&lock->cnts) & _QW_WMASK); in queued_read_can_lock()
57 return !atomic_read(&lock->cnts); in queued_write_can_lock()
69 cnts = atomic_read(&lock->cnts); in queued_read_trylock()
88 cnts = atomic_read(&lock->cnts); in queued_write_trylock()
Datomic.h129 #ifndef atomic_read
130 #define atomic_read(v) READ_ONCE((v)->counter) macro
182 c = atomic_read(v); in __atomic_add_unless()
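The two hits just above are the fallback definitions themselves: atomic_read() at line 130 is the READ_ONCE() load, and the read at line 182 seeds what is, in the generic helper, a compare-and-swap retry loop. Below is a hedged userspace model of that loop written with C11 compare-exchange; the names mirror the kernel ones for readability only and are not the kernel code.

#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } atomic_t;

static inline int atomic_read(const atomic_t *v)
{
        return atomic_load_explicit(&v->counter, memory_order_relaxed);
}

/*
 * Add a to *v unless it currently equals u; return the value seen before
 * the add.  atomic_read() only seeds the loop; the compare-exchange both
 * detects concurrent changes and performs the update.
 */
static int add_unless(atomic_t *v, int a, int u)
{
        int c = atomic_read(v);

        while (c != u) {
                /* On failure c is refreshed with the current value; retry. */
                if (atomic_compare_exchange_weak(&v->counter, &c, c + a))
                        break;
        }
        return c;
}

int main(void)
{
        atomic_t refs;

        atomic_init(&refs.counter, 2);

        /* Take a reference only if the count has not already hit zero. */
        if (add_unless(&refs, 1, 0) != 0)
                printf("got ref, count is now %d\n", atomic_read(&refs));
        return 0;
}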
/linux-4.4.14/drivers/staging/lustre/lustre/osc/
Dosc_page.c401 atomic_read(&obj->oo_nr_reads), in osc_page_print()
403 atomic_read(&obj->oo_nr_writes), in osc_page_print()
597 int pages = atomic_read(&cli->cl_lru_in_list) >> 1; in osc_cache_too_much()
599 if (atomic_read(&osc_lru_waiters) > 0 && in osc_cache_too_much()
600 atomic_read(cli->cl_lru_left) < lru_shrink_max) in osc_cache_too_much()
606 if (atomic_read(cli->cl_lru_left) < cache->ccc_lru_max >> 4) { in osc_cache_too_much()
609 tmp = cache->ccc_lru_max / atomic_read(&cache->ccc_users); in osc_cache_too_much()
663 LASSERT(atomic_read(&cli->cl_lru_in_list) >= 0); in osc_lru_shrink()
664 if (atomic_read(&cli->cl_lru_in_list) == 0 || target <= 0) in osc_lru_shrink()
676 maxscan = min(target << 1, atomic_read(&cli->cl_lru_in_list)); in osc_lru_shrink()
[all …]
Dlproc_osc.c111 req_count = atomic_read(&osc_pool_req_count); in max_rpcs_in_flight_store()
190 (atomic_read(&cli->cl_lru_in_list) + in osc_cached_mb_seq_show()
191 atomic_read(&cli->cl_lru_busy)) >> shift, in osc_cached_mb_seq_show()
192 atomic_read(&cli->cl_lru_busy)); in osc_cached_mb_seq_show()
224 rc = atomic_read(&cli->cl_lru_in_list) - pages_number; in osc_cached_mb_seq_write()
441 return sprintf(buf, "%u\n", atomic_read(&obd->u.cli.cl_resends)); in resend_count_show()
522 atomic_read(&obd->u.cli.cl_destroy_in_flight)); in destroys_in_flight_show()
618 atomic_read(&cli->cl_pending_w_pages)); in osc_rpc_stats_seq_show()
620 atomic_read(&cli->cl_pending_r_pages)); in osc_rpc_stats_seq_show()
Dosc_cache.c115 atomic_read(&__ext->oe_refc), \
116 atomic_read(&__ext->oe_users), \
191 if (atomic_read(&ext->oe_refc) <= 0) { in osc_extent_sanity_check0()
196 if (atomic_read(&ext->oe_refc) < atomic_read(&ext->oe_users)) { in osc_extent_sanity_check0()
209 if (atomic_read(&ext->oe_users) == 0) { in osc_extent_sanity_check0()
232 if (atomic_read(&ext->oe_users) > 0) { in osc_extent_sanity_check0()
373 LASSERT(atomic_read(&ext->oe_refc) >= 0); in osc_extent_get()
380 LASSERT(atomic_read(&ext->oe_refc) > 0); in osc_extent_put()
383 LASSERT(atomic_read(&ext->oe_users) == 0); in osc_extent_put()
402 LASSERT(atomic_read(&ext->oe_refc) > 1); in osc_extent_put_trust()
[all …]
/linux-4.4.14/net/batman-adv/
Dgateway_common.c166 gw_mode = atomic_read(&bat_priv->gw_mode); in batadv_gw_tvlv_container_update()
174 down = atomic_read(&bat_priv->gw.bandwidth_down); in batadv_gw_tvlv_container_update()
175 up = atomic_read(&bat_priv->gw.bandwidth_up); in batadv_gw_tvlv_container_update()
194 down_curr = (unsigned int)atomic_read(&bat_priv->gw.bandwidth_down); in batadv_gw_bandwidth_set()
195 up_curr = (unsigned int)atomic_read(&bat_priv->gw.bandwidth_up); in batadv_gw_bandwidth_set()
263 (atomic_read(&bat_priv->gw_mode) == BATADV_GW_MODE_CLIENT) && in batadv_gw_tvlv_ogm_handler_v1()
264 (atomic_read(&bat_priv->gw_sel_class) > 2)) in batadv_gw_tvlv_ogm_handler_v1()
Dsysfs.c160 atomic_read(&bat_priv->_name) == 0 ? \
192 return sprintf(buff, "%i\n", atomic_read(&bat_priv->_var)); \
228 atomic_read(&vlan->_name) == 0 ? \
267 if (atomic_read(attr) == enabled) in batadv_store_bool_attr()
271 atomic_read(attr) == 1 ? "enabled" : "disabled", in batadv_store_bool_attr()
322 if (atomic_read(attr) == uint_val) in batadv_store_uint_attr()
326 attr_name, atomic_read(attr), uint_val); in batadv_store_uint_attr()
370 switch (atomic_read(&bat_priv->gw_mode)) { in batadv_show_gw_mode()
419 if (atomic_read(&bat_priv->gw_mode) == gw_mode_tmp) in batadv_store_gw_mode()
422 switch (atomic_read(&bat_priv->gw_mode)) { in batadv_store_gw_mode()
[all …]
Dbridge_loop_avoidance.c535 if (!atomic_read(&backbone_gw->request_sent)) { in batadv_bla_send_request()
694 if (atomic_read(&backbone_gw->request_sent)) { in batadv_handle_announce()
1034 if (atomic_read(&backbone_gw->request_sent)) in batadv_bla_purge_backbone_gw()
1118 if (!atomic_read(&bat_priv->bridge_loop_avoidance)) in batadv_bla_update_orig_address()
1177 if (!atomic_read(&bat_priv->bridge_loop_avoidance)) in batadv_bla_periodic_work()
1206 if (atomic_read(&backbone_gw->request_sent) == 0) in batadv_bla_periodic_work()
1375 if (!atomic_read(&bat_priv->bridge_loop_avoidance)) in batadv_bla_is_backbone_gw_orig()
1414 if (!atomic_read(&orig_node->bat_priv->bridge_loop_avoidance)) in batadv_bla_is_backbone_gw()
1484 if (!atomic_read(&bat_priv->bridge_loop_avoidance)) in batadv_bla_rx()
1487 if (unlikely(atomic_read(&bat_priv->bla.num_requests))) in batadv_bla_rx()
[all …]
Dmulticast.c378 if (!atomic_read(&bat_priv->multicast_mode)) in batadv_mcast_forw_mode_check()
381 if (atomic_read(&bat_priv->mcast.num_disabled)) in batadv_mcast_forw_mode_check()
410 return atomic_read(&bat_priv->mcast.num_want_all_ipv4); in batadv_mcast_forw_want_all_ip_count()
412 return atomic_read(&bat_priv->mcast.num_want_all_ipv6); in batadv_mcast_forw_want_all_ip_count()
570 atomic_read(&bat_priv->mcast.num_want_all_unsnoopables); in batadv_mcast_forw_mode()
Dgateway_client.c178 switch (atomic_read(&bat_priv->gw_sel_class)) { in batadv_gw_get_best_gw_node()
241 if (atomic_read(&bat_priv->gw_mode) != BATADV_GW_MODE_CLIENT) in batadv_gw_check_client_stop()
269 if (atomic_read(&bat_priv->gw_mode) != BATADV_GW_MODE_CLIENT) in batadv_gw_election()
388 if ((atomic_read(&bat_priv->gw_sel_class) > 3) && in batadv_gw_check_election()
389 (orig_tq_avg - gw_tq_avg < atomic_read(&bat_priv->gw_sel_class))) in batadv_gw_check_election()
810 switch (atomic_read(&bat_priv->gw_mode)) { in batadv_gw_out_of_range()
Dtranslation-table.c237 count = atomic_read(&tt_global_entry->orig_list_count); in batadv_tt_global_hash_count()
473 tt_local_entries += atomic_read(&vlan->tt.num_entries); in batadv_tt_local_table_transmit_size()
586 packet_size_max = atomic_read(&bat_priv->packet_size_max); in batadv_tt_local_add()
610 (u8)atomic_read(&bat_priv->tt.vn)); in batadv_tt_local_add()
746 num_entries += atomic_read(&vlan->tt.num_entries); in batadv_tt_prepare_tvlv_global_data()
766 (*tt_data)->ttvn = atomic_read(&orig_node->last_ttvn); in batadv_tt_prepare_tvlv_global_data()
820 num_entries += atomic_read(&vlan->tt.num_entries); in batadv_tt_prepare_tvlv_local_data()
840 (*tt_data)->ttvn = atomic_read(&bat_priv->tt.vn); in batadv_tt_prepare_tvlv_local_data()
874 tt_diff_entries_num = atomic_read(&bat_priv->tt.local_changes); in batadv_tt_tvlv_container_update()
959 net_dev->name, (u8)atomic_read(&bat_priv->tt.vn)); in batadv_tt_local_seq_print_text()
[all …]
/linux-4.4.14/kernel/
Dcred.c77 return atomic_read(&cred->subscribers); in read_cred_subscribers()
103 atomic_read(&cred->usage) != 0 || in put_cred_rcu()
108 atomic_read(&cred->usage), in put_cred_rcu()
111 if (atomic_read(&cred->usage) != 0) in put_cred_rcu()
113 cred, atomic_read(&cred->usage)); in put_cred_rcu()
137 atomic_read(&cred->usage), in __put_cred()
140 BUG_ON(atomic_read(&cred->usage) != 0); in __put_cred()
161 atomic_read(&tsk->cred->usage), in exit_creds()
337 p->cred, atomic_read(&p->cred->usage), in copy_creds()
428 atomic_read(&new->usage), in commit_creds()
[all …]
Dsmpboot.c381 return atomic_read(&per_cpu(cpu_hotplug_state, cpu)); in cpu_report_state()
403 switch (atomic_read(&per_cpu(cpu_hotplug_state, cpu))) { in cpu_check_up_prepare()
472 if (atomic_read(&per_cpu(cpu_hotplug_state, cpu)) == CPU_DEAD) in cpu_wait_death()
477 while (atomic_read(&per_cpu(cpu_hotplug_state, cpu)) != CPU_DEAD) { in cpu_wait_death()
485 oldstate = atomic_read(&per_cpu(cpu_hotplug_state, cpu)); in cpu_wait_death()
516 oldstate = atomic_read(&per_cpu(cpu_hotplug_state, cpu)); in cpu_report_death()
/linux-4.4.14/drivers/s390/scsi/
Dzfcp_erp.c95 if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_lun()
103 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_port()
118 if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_INUSE) in zfcp_erp_action_dismiss_adapter()
139 l_status = atomic_read(&zfcp_sdev->status); in zfcp_erp_required_act()
142 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
150 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
155 p_status = atomic_read(&port->status); in zfcp_erp_required_act()
158 a_status = atomic_read(&adapter->status); in zfcp_erp_required_act()
168 a_status = atomic_read(&adapter->status); in zfcp_erp_required_act()
199 if (!(atomic_read(&zfcp_sdev->status) & in zfcp_erp_setup_act()
[all …]
Dzfcp_sysfs.c59 ZFCP_DEFINE_A_ATTR(status, "0x%08x\n", atomic_read(&adapter->status));
68 ZFCP_DEFINE_A_ATTR(in_recovery, "%d\n", (atomic_read(&adapter->status) &
72 atomic_read(&port->status));
74 (atomic_read(&port->status) &
95 if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_sysfs_port_failed_show()
131 status = atomic_read(&sdev_to_zfcp(sdev)->status); in zfcp_sysfs_unit_failed_show()
176 if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_FAILED) in zfcp_sysfs_adapter_failed_show()
263 if (atomic_read(&port->units) > 0) { in zfcp_sysfs_port_remove_store()
465 (atomic_read(&zfcp_sdev->status) &
473 unsigned int status = atomic_read(&sdev_to_zfcp(sdev)->status); in zfcp_sysfs_scsi_zfcp_failed_show()
[all …]
Dzfcp_qdio.c56 used = QDIO_MAX_BUFFERS_PER_Q - atomic_read(&qdio->req_q_free); in zfcp_qdio_account()
210 if (atomic_read(&qdio->req_q_free) || in zfcp_qdio_sbal_check()
211 !(atomic_read(&qdio->adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP)) in zfcp_qdio_sbal_check()
233 if (!(atomic_read(&qdio->adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP)) in zfcp_qdio_sbal_get()
347 if (!(atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP)) in zfcp_qdio_close()
360 count = atomic_read(&qdio->req_q_free); in zfcp_qdio_close()
384 if (atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP) in zfcp_qdio_open()
497 if (atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_SIOSL_ISSUED) in zfcp_qdio_siosl()
Dzfcp_dbf.c235 rec->adapter_status = atomic_read(&adapter->status); in zfcp_dbf_set_common()
237 rec->port_status = atomic_read(&port->status); in zfcp_dbf_set_common()
242 rec->lun_status = atomic_read(&sdev_to_zfcp(sdev)->status); in zfcp_dbf_set_common()
313 atomic_read(&sdev_to_zfcp(erp->sdev)->erp_counter); in zfcp_dbf_rec_run()
315 rec->u.run.rec_count = atomic_read(&erp->port->erp_counter); in zfcp_dbf_rec_run()
317 rec->u.run.rec_count = atomic_read(&erp->adapter->erp_counter); in zfcp_dbf_rec_run()
/linux-4.4.14/drivers/staging/lustre/lustre/ptlrpc/
Dconnection.c84 conn, atomic_read(&conn->c_refcount), in ptlrpc_connection_get()
97 LASSERT(atomic_read(&conn->c_refcount) > 1); in ptlrpc_connection_put()
119 conn, atomic_read(&conn->c_refcount), in ptlrpc_connection_put()
131 conn, atomic_read(&conn->c_refcount), in ptlrpc_connection_addref()
227 LASSERTF(atomic_read(&conn->c_refcount) == 0, in conn_exit()
229 atomic_read(&conn->c_refcount)); in conn_exit()
Dimport.c313 (atomic_read(&imp->imp_inflight) == 0), in ptlrpc_invalidate_import()
320 atomic_read(&imp->imp_inflight)); in ptlrpc_invalidate_import()
323 if (atomic_read(&imp->imp_inflight) == 0) { in ptlrpc_invalidate_import()
324 int count = atomic_read(&imp->imp_unregistering); in ptlrpc_invalidate_import()
360 atomic_read(&imp-> in ptlrpc_invalidate_import()
371 LASSERT(atomic_read(&imp->imp_inflight) == 0); in ptlrpc_invalidate_import()
453 if (atomic_read(&imp->imp_inval_count) > 0) { in ptlrpc_reconnect_import()
458 (atomic_read(&imp->imp_inval_count) == 0), in ptlrpc_reconnect_import()
462 atomic_read(&imp->imp_inval_count)); in ptlrpc_reconnect_import()
1241 LASSERT(atomic_read(&imp->imp_replay_inflight) == 0); in signal_completed_replay()
[all …]
Dptlrpcd.c217 rc = atomic_read(&src->set_new_count); in ptlrpcd_steal_rqset()
285 if (atomic_read(&set->set_new_count)) { in ptlrpcd_check()
290 atomic_add(atomic_read(&set->set_new_count), in ptlrpcd_check()
320 if (atomic_read(&set->set_remaining)) in ptlrpcd_check()
339 rc = atomic_read(&set->set_new_count); in ptlrpcd_check()
365 if (atomic_read(&ps->set_new_count)) { in ptlrpcd_check()
/linux-4.4.14/fs/afs/
Dserver.c94 _leave(" = %p{%d}", server, atomic_read(&server->usage)); in afs_alloc_server()
143 _leave(" = %p{%d}", server, atomic_read(&server->usage)); in afs_lookup_server()
157 _leave(" = %p{%d}", server, atomic_read(&server->usage)); in afs_lookup_server()
224 _enter("%p{%d}", server, atomic_read(&server->usage)); in afs_put_server()
226 _debug("PUT SERVER %d", atomic_read(&server->usage)); in afs_put_server()
228 ASSERTCMP(atomic_read(&server->usage), >, 0); in afs_put_server()
238 if (atomic_read(&server->usage) == 0) { in afs_put_server()
261 ASSERTCMP(atomic_read(&server->cb_break_n), ==, 0); in afs_destroy_server()
294 if (atomic_read(&server->usage) > 0) { in afs_reap_server()
Dcell.c348 _enter("%p{%d,%s}", cell, atomic_read(&cell->usage), cell->name); in afs_put_cell()
350 ASSERTCMP(atomic_read(&cell->usage), >, 0); in afs_put_cell()
379 _enter("%p{%d,%s}", cell, atomic_read(&cell->usage), cell->name); in afs_cell_destroy()
381 ASSERTCMP(atomic_read(&cell->usage), >=, 0); in afs_cell_destroy()
385 if (atomic_read(&cell->usage) > 0) { in afs_cell_destroy()
392 while (atomic_read(&cell->usage) > 0) { in afs_cell_destroy()
402 ASSERTCMP(atomic_read(&cell->usage), ==, 0); in afs_cell_destroy()
451 cell->name, atomic_read(&cell->usage)); in afs_cell_purge()
Drxrpc.c134 ASSERTCMP(atomic_read(&afs_outstanding_skbs), ==, 0); in afs_close_socket()
135 ASSERTCMP(atomic_read(&afs_outstanding_calls), ==, 0); in afs_close_socket()
146 _debug("DLVR NULL [%d]", atomic_read(&afs_outstanding_skbs)); in afs_data_delivered()
150 skb, skb->mark, atomic_read(&afs_outstanding_skbs)); in afs_data_delivered()
163 _debug("FREE NULL [%d]", atomic_read(&afs_outstanding_skbs)); in afs_free_skb()
167 skb, skb->mark, atomic_read(&afs_outstanding_skbs)); in afs_free_skb()
180 call, call->type->name, atomic_read(&afs_outstanding_calls)); in afs_free_call()
228 call, type->name, atomic_read(&afs_outstanding_calls)); in afs_alloc_flat_call()
355 atomic_read(&afs_outstanding_calls)); in afs_make_call()
435 skb, skb->mark, atomic_read(&afs_outstanding_skbs)); in afs_rx_interceptor()
[all …]
/linux-4.4.14/net/rxrpc/
Dar-peer.c123 atomic_read(&peer->usage), in rxrpc_get_peer()
127 if (atomic_read(&peer->usage) > 0 && in rxrpc_get_peer()
148 if (atomic_read(&peer->usage) > 0 && in rxrpc_get_peer()
160 usage = atomic_read(&peer->usage); in rxrpc_get_peer()
209 if (atomic_read(&peer->usage) > 0 && in rxrpc_find_peer()
243 _enter("%p{u=%d}", peer, atomic_read(&peer->usage)); in rxrpc_put_peer()
245 ASSERTCMP(atomic_read(&peer->usage), >, 0); in rxrpc_put_peer()
264 _enter("%p{%d}", peer, atomic_read(&peer->usage)); in rxrpc_destroy_peer()
Dar-transport.c118 usage = atomic_read(&trans->usage); in rxrpc_get_transport()
188 _enter("%p{u=%d}", trans, atomic_read(&trans->usage)); in rxrpc_put_transport()
190 ASSERTCMP(atomic_read(&trans->usage), >, 0); in rxrpc_put_transport()
236 trans->debug_id, atomic_read(&trans->usage), in rxrpc_transport_reaper()
239 if (likely(atomic_read(&trans->usage) > 0)) in rxrpc_transport_reaper()
263 ASSERTCMP(atomic_read(&trans->usage), ==, 0); in rxrpc_transport_reaper()
Dar-proc.c81 atomic_read(&call->usage), in rxrpc_call_seq_show()
164 atomic_read(&conn->usage), in rxrpc_connection_seq_show()
167 atomic_read(&conn->serial), in rxrpc_connection_seq_show()
168 atomic_read(&conn->hi_serial)); in rxrpc_connection_seq_show()
Dar-connection.c155 _leave(" = %p [extant %d]", bundle, atomic_read(&bundle->usage)); in rxrpc_get_bundle()
168 _leave(" = %p [second %d]", bundle, atomic_read(&bundle->usage)); in rxrpc_get_bundle()
178 _enter("%p,%p{%d}",trans, bundle, atomic_read(&bundle->usage)); in rxrpc_put_bundle()
719 _leave(" = %p {u=%d}", conn, atomic_read(&conn->usage)); in rxrpc_incoming_connection()
807 conn, atomic_read(&conn->usage), conn->debug_id); in rxrpc_put_connection()
809 ASSERTCMP(atomic_read(&conn->usage), >, 0); in rxrpc_put_connection()
825 _enter("%p{%d}", conn, atomic_read(&conn->usage)); in rxrpc_destroy_connection()
827 ASSERTCMP(atomic_read(&conn->usage), ==, 0); in rxrpc_destroy_connection()
861 conn->debug_id, atomic_read(&conn->usage), in rxrpc_connection_reaper()
864 if (likely(atomic_read(&conn->usage) > 0)) in rxrpc_connection_reaper()
[all …]
Dar-call.c399 _leave(" = %p [extant %d]", call, atomic_read(&call->usage)); in rxrpc_get_client_call()
407 _leave(" = %p [second %d]", call, atomic_read(&call->usage)); in rxrpc_get_client_call()
606 _leave(" = %p [%d]", call, atomic_read(&call->usage)); in rxrpc_find_server_call()
619 call->debug_id, atomic_read(&call->usage), in rxrpc_release_call()
620 atomic_read(&call->ackr_not_idle), in rxrpc_release_call()
815 _enter("%p{u=%d}", call, atomic_read(&call->usage)); in __rxrpc_put_call()
817 ASSERTCMP(atomic_read(&call->usage), >, 0); in __rxrpc_put_call()
903 call, atomic_read(&call->usage), call->channel, call->conn); in rxrpc_destroy_call()
932 switch (atomic_read(&call->usage)) { in rxrpc_destroy_all_calls()
945 call, atomic_read(&call->usage), in rxrpc_destroy_all_calls()
[all …]
Dar-local.c231 _enter("%p{u=%d}", local, atomic_read(&local->usage)); in rxrpc_put_local()
233 ASSERTCMP(atomic_read(&local->usage), >, 0); in rxrpc_put_local()
254 _enter("%p{%d}", local, atomic_read(&local->usage)); in rxrpc_destroy_local()
259 if (atomic_read(&local->usage) > 0) { in rxrpc_destroy_local()
/linux-4.4.14/arch/x86/platform/uv/
Duv_nmi.c221 nmi = atomic_read(&hub_nmi->in_nmi); in uv_check_nmi()
243 nmi = atomic_read(&hub_nmi->in_nmi); in uv_check_nmi()
250 nmi = atomic_read(&uv_in_nmi); in uv_check_nmi()
268 if (cpu == atomic_read(&hub_nmi->cpu_owner)) { in uv_clear_nmi()
376 atomic_read(&uv_nmi_cpus_in_nmi), num_online_cpus()); in uv_nmi_wait()
444 while (atomic_read(&uv_nmi_cpus_in_nmi) > 0) in uv_nmi_sync_exit()
448 while (atomic_read(&uv_nmi_slave_continue)) in uv_nmi_sync_exit()
463 atomic_read(&uv_nmi_cpus_in_nmi), cpu); in uv_nmi_dump_state()
481 while (!atomic_read(&uv_nmi_slave_continue)) in uv_nmi_dump_state()
518 while (atomic_read(&uv_nmi_kexec_failed) == 0) in uv_nmi_kdump()
[all …]
/linux-4.4.14/net/mac80211/
Dled.h21 if (!atomic_read(&local->rx_led_active)) in ieee80211_led_rx()
32 if (!atomic_read(&local->tx_led_active)) in ieee80211_led_tx()
81 if (ieee80211_is_data(fc) && atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_tx()
90 if (ieee80211_is_data(fc) && atomic_read(&local->tpt_led_active)) in ieee80211_tpt_led_trig_rx()
/linux-4.4.14/fs/
Dmbcache.c195 mb_assert(!(ce->e_used || ce->e_queued || atomic_read(&ce->e_refcnt))); in __mb_cache_entry_forget()
215 if (!(ce->e_used || ce->e_queued || atomic_read(&ce->e_refcnt))) { in __mb_cache_entry_release()
263 if (ce->e_used || ce->e_queued || atomic_read(&ce->e_refcnt)) in mb_cache_shrink_scan()
270 if (ce->e_used || ce->e_queued || atomic_read(&ce->e_refcnt) || in mb_cache_shrink_scan()
299 atomic_read(&cache->c_entry_count)); in mb_cache_shrink_count()
300 count += atomic_read(&cache->c_entry_count); in mb_cache_shrink_count()
410 atomic_read(&ce->e_refcnt)) in mb_cache_shrink()
420 atomic_read(&ce->e_refcnt) || in mb_cache_shrink()
430 atomic_read(&ce->e_refcnt))); in mb_cache_shrink()
473 atomic_read(&ce->e_refcnt))); in mb_cache_destroy()
[all …]
/linux-4.4.14/fs/xfs/
Dxfs_trans_buf.c164 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_get_buf_map()
215 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_getsb()
285 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_read_buf_map()
374 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_brelse()
435 ASSERT(atomic_read(&bip->bli_refcount) == 0); in xfs_trans_brelse()
461 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bhold()
481 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_bhold_release()
523 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_log_buf()
594 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_binval()
648 ASSERT(atomic_read(&bip->bli_refcount) > 0); in xfs_trans_inode_buf()
[all …]
Dxfs_dquot_item.c104 ASSERT(atomic_read(&dqp->q_pincount) > 0); in xfs_qm_dquot_logitem_unpin()
130 if (atomic_read(&dqp->q_pincount) == 0) in xfs_qm_dqunpin_wait()
137 wait_event(dqp->q_pinwait, (atomic_read(&dqp->q_pincount) == 0)); in xfs_qm_dqunpin_wait()
151 if (atomic_read(&dqp->q_pincount) > 0) in xfs_qm_dquot_logitem_push()
161 if (atomic_read(&dqp->q_pincount) > 0) { in xfs_qm_dquot_logitem_push()
/linux-4.4.14/arch/x86/kernel/
Dtsc_sync.c146 while (atomic_read(&start_count) != cpus-1) in check_tsc_sync_source()
155 while (atomic_read(&stop_count) != cpus-1) in check_tsc_sync_source()
199 while (atomic_read(&start_count) != cpus) in check_tsc_sync_target()
212 while (atomic_read(&stop_count) != cpus) in check_tsc_sync_target()
/linux-4.4.14/fs/gfs2/
Dlog.c315 gfs2_assert_withdraw(sdp, atomic_read(&sdp->sd_log_blks_free) <= in gfs2_log_release()
353 free_blocks = atomic_read(&sdp->sd_log_blks_free); in gfs2_log_reserve()
360 if (atomic_read(&sdp->sd_log_blks_free) <= wanted) in gfs2_log_reserve()
362 free_blocks = atomic_read(&sdp->sd_log_blks_free); in gfs2_log_reserve()
494 gfs2_assert_withdraw(sdp, atomic_read(&sdp->sd_log_blks_free) <= in log_pull_tail()
505 if (atomic_read(&sdp->sd_log_in_flight)) { in log_flush_wait()
509 if (atomic_read(&sdp->sd_log_in_flight)) in log_flush_wait()
511 } while(atomic_read(&sdp->sd_log_in_flight)); in log_flush_wait()
662 enum gfs2_freeze_state state = atomic_read(&sdp->sd_freeze_state); in log_write_header()
708 enum gfs2_freeze_state state = atomic_read(&sdp->sd_freeze_state); in gfs2_log_flush()
[all …]
/linux-4.4.14/drivers/gpu/drm/amd/amdkfd/
Dkfd_interrupt.c110 unsigned int rptr = atomic_read(&kfd->interrupt_ring_rptr); in enqueue_ih_ring_entry()
111 unsigned int wptr = atomic_read(&kfd->interrupt_ring_wptr); in enqueue_ih_ring_entry()
143 unsigned int wptr = atomic_read(&kfd->interrupt_ring_wptr); in dequeue_ih_ring_entry()
144 unsigned int rptr = atomic_read(&kfd->interrupt_ring_rptr); in dequeue_ih_ring_entry()
/linux-4.4.14/drivers/md/
Dfaulty.c97 atomic_read(&conf->counters[mode]) <= 0) in check_mode()
180 if (atomic_read(&conf->counters[WriteAll])) { in make_request()
234 if ((n=atomic_read(&conf->counters[WriteTransient])) != 0) in status()
238 if ((n=atomic_read(&conf->counters[ReadTransient])) != 0) in status()
242 if ((n=atomic_read(&conf->counters[WritePersistent])) != 0) in status()
246 if ((n=atomic_read(&conf->counters[ReadPersistent])) != 0) in status()
251 if ((n=atomic_read(&conf->counters[ReadFixable])) != 0) in status()
255 if ((n=atomic_read(&conf->counters[WriteAll])) != 0) in status()
Ddm-queue-length.c98 DMEMIT("%d ", atomic_read(&pi->qlen)); in ql_status()
185 (atomic_read(&pi->qlen) < atomic_read(&best->qlen))) in ql_select_path()
188 if (!atomic_read(&best->qlen)) in ql_select_path()
Ddm-service-time.c93 DMEMIT("%d %u ", atomic_read(&pi->in_flight_size), in st_status()
201 sz1 = atomic_read(&pi1->in_flight_size); in st_compare_load()
202 sz2 = atomic_read(&pi2->in_flight_size); in st_compare_load()
/linux-4.4.14/arch/m68k/include/asm/
Datomic.h20 #define atomic_read(v) READ_ONCE((v)->counter) macro
52 : "g" (i), "2" (atomic_read(v))); \
135 prev = atomic_read(v); in atomic_cmpxchg()
148 prev = atomic_read(v); in atomic_xchg()
180 c = atomic_read(v); in __atomic_add_unless()
/linux-4.4.14/drivers/s390/char/
Dmonreader.c212 if (!atomic_read(&monpriv->read_ready)) in mon_next_message()
312 atomic_read(&monpriv->iucv_connected) || in mon_open()
313 atomic_read(&monpriv->iucv_severed)); in mon_open()
314 if (atomic_read(&monpriv->iucv_severed)) { in mon_open()
381 atomic_read(&monpriv->read_ready) || in mon_read()
382 atomic_read(&monpriv->iucv_severed)); in mon_read()
385 if (unlikely(atomic_read(&monpriv->iucv_severed))) in mon_read()
436 if (unlikely(atomic_read(&monpriv->iucv_severed))) in mon_poll()
438 if (atomic_read(&monpriv->read_ready)) in mon_poll()
505 atomic_read(&monpriv->iucv_connected) || in monreader_thaw()
[all …]
/linux-4.4.14/fs/cifs/
Dsmb2ops.c469 atomic_read(&sent[SMB2_NEGOTIATE_HE]), in smb2_print_stats()
470 atomic_read(&failed[SMB2_NEGOTIATE_HE])); in smb2_print_stats()
472 atomic_read(&sent[SMB2_SESSION_SETUP_HE]), in smb2_print_stats()
473 atomic_read(&failed[SMB2_SESSION_SETUP_HE])); in smb2_print_stats()
475 atomic_read(&sent[SMB2_LOGOFF_HE]), in smb2_print_stats()
476 atomic_read(&failed[SMB2_LOGOFF_HE])); in smb2_print_stats()
478 atomic_read(&sent[SMB2_TREE_CONNECT_HE]), in smb2_print_stats()
479 atomic_read(&failed[SMB2_TREE_CONNECT_HE])); in smb2_print_stats()
481 atomic_read(&sent[SMB2_TREE_DISCONNECT_HE]), in smb2_print_stats()
482 atomic_read(&failed[SMB2_TREE_DISCONNECT_HE])); in smb2_print_stats()
[all …]
Dsmb1ops.c654 atomic_read(&tcon->stats.cifs_stats.num_oplock_brks)); in cifs_print_stats()
656 atomic_read(&tcon->stats.cifs_stats.num_reads), in cifs_print_stats()
659 atomic_read(&tcon->stats.cifs_stats.num_writes), in cifs_print_stats()
662 atomic_read(&tcon->stats.cifs_stats.num_flushes)); in cifs_print_stats()
664 atomic_read(&tcon->stats.cifs_stats.num_locks), in cifs_print_stats()
665 atomic_read(&tcon->stats.cifs_stats.num_hardlinks), in cifs_print_stats()
666 atomic_read(&tcon->stats.cifs_stats.num_symlinks)); in cifs_print_stats()
668 atomic_read(&tcon->stats.cifs_stats.num_opens), in cifs_print_stats()
669 atomic_read(&tcon->stats.cifs_stats.num_closes), in cifs_print_stats()
670 atomic_read(&tcon->stats.cifs_stats.num_deletes)); in cifs_print_stats()
[all …]
Dcifs_debug.c180 atomic_read(&server->in_send), in cifs_debug_data_proc_show()
181 atomic_read(&server->num_waiters)); in cifs_debug_data_proc_show()
318 atomic_read(&totBufAllocCount), in cifs_stats_proc_show()
319 atomic_read(&totSmBufAllocCount)); in cifs_stats_proc_show()
322 seq_printf(m, "Operations (MIDs): %d\n", atomic_read(&midCount)); in cifs_stats_proc_show()
348 atomic_read(&tcon->num_smbs_sent)); in cifs_stats_proc_show()
/linux-4.4.14/arch/sh/kernel/
Dftrace.c108 atomic_read(&nmi_update_count)); in ftrace_arch_read_dyn_info()
114 int old = atomic_read(&nmi_running); in clear_mod_flag()
174 if (!atomic_read(&nmi_running)) in wait_for_nmi()
179 } while (atomic_read(&nmi_running)); in wait_for_nmi()
350 if (unlikely(atomic_read(&current->tracing_graph_pause))) in prepare_ftrace_return()
Dsmp.c362 if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) { in flush_tlb_mm()
394 if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) { in flush_tlb_range()
437 if ((atomic_read(&vma->vm_mm->mm_users) != 1) || in flush_tlb_page()
/linux-4.4.14/fs/ocfs2/dlm/
Ddlmdebug.c84 atomic_read(&lock->lock_refs.refcount), in __dlm_print_lock()
109 res->last_used, atomic_read(&res->refs.refcount), in __dlm_print_one_lock_resource()
117 res->inflight_locks, atomic_read(&res->asts_reserved)); in __dlm_print_one_lock_resource()
301 atomic_read(&mle->mle_refs.refcount)); in dump_mle()
517 atomic_read(&lock->lock_refs.refcount)); in dump_lock()
543 atomic_read(&res->asts_reserved), in dump_lockres()
544 atomic_read(&res->refs.refcount)); in dump_lockres()
757 atomic_read(&dlm->res_cur_count), in debug_state_print()
758 atomic_read(&dlm->res_tot_count)); in debug_state_print()
761 tot_mles += atomic_read(&dlm->mle_tot_count[i]); in debug_state_print()
[all …]
/linux-4.4.14/kernel/debug/
Ddebug_core.c401 if (atomic_read(&kgdb_setting_breakpoint)) in kgdb_io_ready()
418 if (atomic_read(&kgdb_active) != raw_smp_processor_id()) in kgdb_reenter_check()
555 if (atomic_read(&kgdb_cpu_doing_single_step) != -1 && in kgdb_cpu_enter()
603 (atomic_read(&masters_in_kgdb) + atomic_read(&slaves_in_kgdb)) != in kgdb_cpu_enter()
652 while (kgdb_do_roundup && atomic_read(&slaves_in_kgdb)) in kgdb_cpu_enter()
657 if (atomic_read(&kgdb_cpu_doing_single_step) != -1) { in kgdb_cpu_enter()
658 int sstep_cpu = atomic_read(&kgdb_cpu_doing_single_step); in kgdb_cpu_enter()
794 if (!kgdb_connected || atomic_read(&kgdb_active) != -1 || dbg_kdb_mode) in kgdb_console_write()
959 if (atomic_read(&kgdb_break_tasklet_var) || in kgdb_schedule_breakpoint()
960 atomic_read(&kgdb_active) != -1 || in kgdb_schedule_breakpoint()
[all …]
/linux-4.4.14/drivers/gpu/drm/qxl/
Dqxl_debugfs.c45 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received)); in qxl_debugfs_irq_received()
46 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_display)); in qxl_debugfs_irq_received()
47 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_cursor)); in qxl_debugfs_irq_received()
48 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_io_cmd)); in qxl_debugfs_irq_received()
Dqxl_cmd.c288 irq_num = atomic_read(&qdev->irq_received_io_cmd); in wait_for_io_cmd_user()
292 atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ); in wait_for_io_cmd_user()
295 atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ); in wait_for_io_cmd_user()
299 irq_num = atomic_read(&qdev->irq_received_io_cmd); in wait_for_io_cmd_user()
305 atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ); in wait_for_io_cmd_user()
308 atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ); in wait_for_io_cmd_user()
/linux-4.4.14/arch/powerpc/platforms/powermac/
Dbacklight.c102 if (atomic_read(&kernel_backlight_disabled)) in pmac_backlight_key_worker()
130 if (atomic_read(&kernel_backlight_disabled)) in pmac_backlight_key()
169 if (atomic_read(&kernel_backlight_disabled)) in pmac_backlight_set_legacy_worker()
177 if (atomic_read(&kernel_backlight_disabled)) in pmac_backlight_set_legacy_brightness_pmu()
/linux-4.4.14/drivers/mtd/maps/
Dvmu-flash.c149 if (atomic_read(&mdev->busy) == 1) { in maple_vmu_read_block()
151 atomic_read(&mdev->busy) == 0, HZ); in maple_vmu_read_block()
152 if (atomic_read(&mdev->busy) == 1) { in maple_vmu_read_block()
175 (atomic_read(&mdev->busy) == 0 || in maple_vmu_read_block()
176 atomic_read(&mdev->busy) == 2), HZ * 3); in maple_vmu_read_block()
182 if (error || atomic_read(&mdev->busy) == 2) { in maple_vmu_read_block()
183 if (atomic_read(&mdev->busy) == 2) in maple_vmu_read_block()
254 if (atomic_read(&mdev->busy) == 1) { in maple_vmu_write_block()
256 atomic_read(&mdev->busy) == 0, HZ); in maple_vmu_write_block()
257 if (atomic_read(&mdev->busy) == 1) { in maple_vmu_write_block()
[all …]
/linux-4.4.14/drivers/connector/
Dcn_proc.c74 if (atomic_read(&proc_event_num_listeners) < 1) in proc_fork_connector()
105 if (atomic_read(&proc_event_num_listeners) < 1) in proc_exec_connector()
131 if (atomic_read(&proc_event_num_listeners) < 1) in proc_id_connector()
169 if (atomic_read(&proc_event_num_listeners) < 1) in proc_sid_connector()
194 if (atomic_read(&proc_event_num_listeners) < 1) in proc_ptrace_connector()
227 if (atomic_read(&proc_event_num_listeners) < 1) in proc_comm_connector()
253 if (atomic_read(&proc_event_num_listeners) < 1) in proc_coredump_connector()
278 if (atomic_read(&proc_event_num_listeners) < 1) in proc_exit_connector()
313 if (atomic_read(&proc_event_num_listeners) < 1) in cn_proc_ack()
Dcn_queue.c153 while (atomic_read(&dev->refcnt)) { in cn_queue_free_dev()
155 dev->name, atomic_read(&dev->refcnt)); in cn_queue_free_dev()
/linux-4.4.14/drivers/staging/lustre/lnet/klnds/socklnd/
Dsocklnd.h519 LASSERT(atomic_read(&conn->ksnc_conn_refcount) > 0); in ksocknal_conn_addref()
529 LASSERT(atomic_read(&conn->ksnc_conn_refcount) > 0); in ksocknal_conn_decref()
541 LASSERT(atomic_read(&conn->ksnc_sock_refcount) > 0); in ksocknal_connsock_addref()
553 LASSERT(atomic_read(&conn->ksnc_sock_refcount) > 0); in ksocknal_connsock_decref()
565 LASSERT(atomic_read(&tx->tx_refcount) > 0); in ksocknal_tx_addref()
575 LASSERT(atomic_read(&tx->tx_refcount) > 0); in ksocknal_tx_decref()
583 LASSERT(atomic_read(&route->ksnr_refcount) > 0); in ksocknal_route_addref()
592 LASSERT(atomic_read(&route->ksnr_refcount) > 0); in ksocknal_route_decref()
600 LASSERT(atomic_read(&peer->ksnp_refcount) > 0); in ksocknal_peer_addref()
609 LASSERT(atomic_read(&peer->ksnp_refcount) > 0); in ksocknal_peer_decref()
/linux-4.4.14/drivers/staging/lustre/lustre/ldlm/
Dldlm_pool.c220 return atomic_read(&pl->pl_limit); in ldlm_pool_get_limit()
240 int granted = atomic_read(&pl->pl_granted); in ldlm_pool_recalc_stats()
241 int grant_rate = atomic_read(&pl->pl_grant_rate); in ldlm_pool_recalc_stats()
242 int cancel_rate = atomic_read(&pl->pl_cancel_rate); in ldlm_pool_recalc_stats()
457 granted = atomic_read(&pl->pl_granted); in lprocfs_pool_state_seq_show()
458 grant_rate = atomic_read(&pl->pl_grant_rate); in lprocfs_pool_state_seq_show()
459 cancel_rate = atomic_read(&pl->pl_cancel_rate); in lprocfs_pool_state_seq_show()
461 lvf = atomic_read(&pl->pl_lock_volume_factor); in lprocfs_pool_state_seq_show()
490 grant_speed = atomic_read(&pl->pl_grant_rate) - in grant_speed_show()
491 atomic_read(&pl->pl_cancel_rate); in grant_speed_show()
[all …]
Dldlm_lock.c180 LASSERT(atomic_read(&lock->l_refc) > 0); in ldlm_lock_put()
1763 rc = atomic_read(&arg->restart) ? -ERESTART : 0; in ldlm_run_ast_work()
1913 lock->l_handle.h_cookie, atomic_read(&lock->l_refc), in _ldlm_lock_debug()
1918 exp ? atomic_read(&exp->exp_refcount) : -99, in _ldlm_lock_debug()
1929 lock->l_handle.h_cookie, atomic_read(&lock->l_refc), in _ldlm_lock_debug()
1934 atomic_read(&resource->lr_refcount), in _ldlm_lock_debug()
1940 exp ? atomic_read(&exp->exp_refcount) : -99, in _ldlm_lock_debug()
1949 lock->l_handle.h_cookie, atomic_read(&lock->l_refc), in _ldlm_lock_debug()
1954 atomic_read(&resource->lr_refcount), in _ldlm_lock_debug()
1960 exp ? atomic_read(&exp->exp_refcount) : -99, in _ldlm_lock_debug()
[all …]
Dldlm_resource.c439 atomic_read(&res->lr_refcount)); in ldlm_resource_getref()
824 atomic_read(&res->lr_refcount) - 1); in ldlm_resource_complain()
861 if (atomic_read(&ns->ns_bref) > 0) { in __ldlm_namespace_free()
867 ldlm_ns_name(ns), atomic_read(&ns->ns_bref)); in __ldlm_namespace_free()
873 atomic_read(&ns->ns_bref) == 0, &lwi); in __ldlm_namespace_free()
880 atomic_read(&ns->ns_bref), rc); in __ldlm_namespace_free()
884 if (atomic_read(&ns->ns_bref)) { in __ldlm_namespace_free()
887 atomic_read(&ns->ns_bref), rc); in __ldlm_namespace_free()
1213 res, atomic_read(&res->lr_refcount) - 1); in ldlm_resource_putref()
1235 res, atomic_read(&res->lr_refcount) - 1); in ldlm_resource_putref_locked()
[all …]
/linux-4.4.14/net/atm/
Datm_misc.c17 if (atomic_read(&sk_atm(vcc)->sk_rmem_alloc) <= sk_atm(vcc)->sk_rcvbuf) in atm_charge()
32 if (atomic_read(&sk->sk_rmem_alloc) <= sk->sk_rcvbuf) { in atm_alloc_charge()
89 #define __HANDLE_ITEM(i) to->i = atomic_read(&from->i) in sonet_copy_stats()
Dproc.c48 atomic_read(&stats->tx), atomic_read(&stats->tx_err), in add_stats()
49 atomic_read(&stats->rx), atomic_read(&stats->rx_err), in add_stats()
50 atomic_read(&stats->rx_drop)); in add_stats()
64 seq_printf(seq, "\t[%d]", atomic_read(&dev->refcnt)); in atm_dev_info()
214 atomic_read(&sk->sk_refcnt)); in vcc_info()
/linux-4.4.14/arch/arm/mach-exynos/
Dpm.c219 if (atomic_read(&cpu1_wakeup)) in exynos_cpu0_enter_aftr()
262 !atomic_read(&cpu1_wakeup)) in exynos_cpu0_enter_aftr()
265 if (!atomic_read(&cpu1_wakeup)) in exynos_cpu0_enter_aftr()
269 while (!atomic_read(&cpu1_wakeup)) { in exynos_cpu0_enter_aftr()
/linux-4.4.14/lib/
Dfault-inject.c48 atomic_read(&attr->space), in fail_dump()
49 atomic_read(&attr->times)); in fail_dump()
117 if (atomic_read(&attr->times) == 0) in should_fail()
120 if (atomic_read(&attr->space) > size) { in should_fail()
139 if (atomic_read(&attr->times) != -1) in should_fail()
Dis_single_threaded.c25 if (atomic_read(&task->signal->live) != 1) in current_is_single_threaded()
28 if (atomic_read(&mm->mm_users) == 1) in current_is_single_threaded()
/linux-4.4.14/fs/cachefiles/
Dproc.c38 x = atomic_read(&cachefiles_lookup_histogram[index]); in cachefiles_histogram_show()
39 y = atomic_read(&cachefiles_mkdir_histogram[index]); in cachefiles_histogram_show()
40 z = atomic_read(&cachefiles_create_histogram[index]); in cachefiles_histogram_show()
Dinterface.c185 _enter("{OBJ%x,%d}", _object->debug_id, atomic_read(&object->usage)); in cachefiles_grab_object()
188 ASSERT((atomic_read(&object->usage) & 0xffff0000) != 0x6b6b0000); in cachefiles_grab_object()
262 object->fscache.debug_id, atomic_read(&object->usage)); in cachefiles_drop_object()
268 ASSERT((atomic_read(&object->usage) & 0xffff0000) != 0x6b6b0000); in cachefiles_drop_object()
323 object->fscache.debug_id, atomic_read(&object->usage)); in cachefiles_put_object()
326 ASSERT((atomic_read(&object->usage) & 0xffff0000) != 0x6b6b0000); in cachefiles_put_object()
/linux-4.4.14/fs/coda/
Dcache.c35 cii->c_cached_epoch = atomic_read(&permission_epoch); in coda_cache_enter()
49 cii->c_cached_epoch = atomic_read(&permission_epoch) - 1; in coda_cache_clear_inode()
69 cii->c_cached_epoch == atomic_read(&permission_epoch); in coda_cache_check()
/linux-4.4.14/drivers/md/bcache/
Dwriteback.h12 ret += atomic_read(d->stripe_sectors_dirty + i); in bcache_dev_sectors_dirty()
31 if (atomic_read(dc->disk.stripe_sectors_dirty + stripe)) in bcache_dev_stripe_dirty()
72 if (!atomic_read(&dc->has_dirty) && in bch_writeback_add()
Dclosure.c103 if (atomic_read(&cl->remaining) & CLOSURE_WAITING) in closure_wait()
126 if ((atomic_read(&cl->remaining) & in closure_sync()
176 int r = atomic_read(&cl->remaining); in debug_seq_show()
Dio.c81 errors = atomic_read(&ca->io_errors); in bch_count_io_errors()
122 int congested = atomic_read(&c->congested); in bch_bbio_count_io_errors()
/linux-4.4.14/drivers/net/wireless/mwifiex/
Dmain.c188 (atomic_read(&adapter->rx_pending) < LOW_RX_PENDING)) { in mwifiex_process_rx()
255 if (atomic_read(&adapter->rx_pending) >= HIGH_RX_PENDING && in mwifiex_main_process()
960 if (atomic_read(&card->port[i].tx_data_urb_pending)) { in mwifiex_multi_chan_resync()
1011 atomic_read(&cardp->tx_cmd_urb_pending)); in mwifiex_drv_info_dump()
1013 atomic_read(&cardp->port[0].tx_data_urb_pending)); in mwifiex_drv_info_dump()
1015 atomic_read(&cardp->port[1].tx_data_urb_pending)); in mwifiex_drv_info_dump()
1017 atomic_read(&cardp->rx_cmd_urb_pending)); in mwifiex_drv_info_dump()
1019 atomic_read(&cardp->rx_data_urb_pending)); in mwifiex_drv_info_dump()
1023 atomic_read(&adapter->tx_pending)); in mwifiex_drv_info_dump()
1025 atomic_read(&adapter->rx_pending)); in mwifiex_drv_info_dump()
[all …]
Dusb.c247 if (atomic_read(&adapter->rx_pending) <= HIGH_RX_PENDING){ in mwifiex_usb_rx_complete()
341 if (atomic_read(&card->rx_cmd_urb_pending) && card->rx_cmd.urb) in mwifiex_usb_free()
347 if (atomic_read(&card->rx_data_urb_pending)) in mwifiex_usb_free()
527 if (atomic_read(&card->rx_cmd_urb_pending) && card->rx_cmd.urb) in mwifiex_usb_suspend()
530 if (atomic_read(&card->rx_data_urb_pending)) in mwifiex_usb_suspend()
579 if (!atomic_read(&card->rx_data_urb_pending)) in mwifiex_usb_resume()
584 if (!atomic_read(&card->rx_cmd_urb_pending)) { in mwifiex_usb_resume()
876 if (atomic_read(&port->tx_data_urb_pending) in mwifiex_usb_host_to_card()
928 atomic_read(&port->tx_data_urb_pending) == in mwifiex_usb_host_to_card()
1152 (!atomic_read(&card->rx_cmd_urb_pending))) in mwifiex_submit_rx_urb()
/linux-4.4.14/arch/powerpc/kernel/
Dcrash.c124 while ((atomic_read(&cpus_in_crash) < ncpus) && (--msecs > 0)) in crash_kexec_prepare_cpus()
129 if (atomic_read(&cpus_in_crash) >= ncpus) { in crash_kexec_prepare_cpus()
135 ncpus - atomic_read(&cpus_in_crash)); in crash_kexec_prepare_cpus()
166 while (atomic_read(&cpus_in_crash) < ncpus) in crash_kexec_prepare_cpus()
Drtas.c739 while (rc == H_MULTI_THREADS_ACTIVE && !atomic_read(&data->done) && in __rtas_suspend_last_cpu()
740 !atomic_read(&data->error)) in __rtas_suspend_last_cpu()
743 if (rc || atomic_read(&data->error)) { in __rtas_suspend_last_cpu()
748 if (atomic_read(&data->error)) in __rtas_suspend_last_cpu()
749 rc = atomic_read(&data->error); in __rtas_suspend_last_cpu()
785 while (rc == H_SUCCESS && !atomic_read(&data->done) && !atomic_read(&data->error)) in __rtas_suspend_cpu()
976 if (atomic_read(&data.error) != 0) in rtas_ibm_suspend_me()
989 return atomic_read(&data.error); in rtas_ibm_suspend_me()
/linux-4.4.14/drivers/block/drbd/
Ddrbd_proc.c317 atomic_read(&device->local_cnt), in drbd_seq_show()
318 atomic_read(&device->ap_pending_cnt) + in drbd_seq_show()
319 atomic_read(&device->rs_pending_cnt), in drbd_seq_show()
320 atomic_read(&device->unacked_cnt), in drbd_seq_show()
321 atomic_read(&device->ap_bio_cnt), in drbd_seq_show()
342 seq_printf(seq, "\tblocked on activity log: %d\n", atomic_read(&device->ap_actlog_cnt)); in drbd_seq_show()
/linux-4.4.14/sound/usb/usx2y/
Dusbusx2yaudio.c151 if (atomic_read(&subs->state) >= state_PRERUNNING) in usX2Y_urb_play_prepare()
219 state = atomic_read(&playbacksubs->state); in usX2Y_usbframe_complete()
246 state = atomic_read(&capsubs->state); in usX2Y_usbframe_complete()
268 snd_printdd("%i %p state=%i\n", s, subs, atomic_read(&subs->state)); in usX2Y_clients_stop()
275 if (atomic_read(&subs->state) >= state_PRERUNNING) in usX2Y_clients_stop()
302 if (unlikely(atomic_read(&subs->state) < state_PREPARED)) { in i_usX2Y_urb_complete()
320 atomic_read(&capsubs->state) >= state_PREPARED && in i_usX2Y_urb_complete()
322 atomic_read(&playbacksubs->state) < state_PREPARED)) { in i_usX2Y_urb_complete()
476 if (subs != NULL && atomic_read(&subs->state) >= state_PREPARED) in usX2Y_urbs_start()
509 if (atomic_read(&subs->state) != state_PREPARED) in usX2Y_urbs_start()
[all …]
Dusx2yhwdeppcm.c137 if (atomic_read(&subs->state) != state_RUNNING) in usX2Y_hwdep_urb_play_prepare()
180 state = atomic_read(&playbacksubs->state); in usX2Y_usbpcm_usbframe_complete()
207 state = atomic_read(&capsubs->state); in usX2Y_usbpcm_usbframe_complete()
236 if (unlikely(atomic_read(&subs->state) < state_PREPARED)) { in i_usX2Y_usbpcm_urb_complete()
252 if (capsubs->completed_urb && atomic_read(&capsubs->state) >= state_PREPARED && in i_usX2Y_usbpcm_urb_complete()
254 (playbacksubs->completed_urb || atomic_read(&playbacksubs->state) < state_PREPARED)) { in i_usX2Y_usbpcm_urb_complete()
381 if (atomic_read(&playback_subs->state) < state_PREPARED) { in snd_usX2Y_usbpcm_hw_free()
425 if (subs != NULL && atomic_read(&subs->state) >= state_PREPARED) in usX2Y_usbpcm_urbs_start()
465 if (atomic_read(&subs->state) != state_PREPARED) in usX2Y_usbpcm_urbs_start()
500 if (atomic_read(&capsubs->state) < state_PREPARED) { in snd_usX2Y_usbpcm_prepare()
[all …]
/linux-4.4.14/drivers/cpuidle/
Dcoupled.c165 while (atomic_read(a) < n) in cpuidle_coupled_parallel_barrier()
173 while (atomic_read(a) > n) in cpuidle_coupled_parallel_barrier()
255 int r = atomic_read(&coupled->ready_waiting_counts) >> WAITING_BITS; in cpuidle_coupled_no_cpus_ready()
267 int r = atomic_read(&coupled->ready_waiting_counts) >> WAITING_BITS; in cpuidle_coupled_cpus_ready()
279 int w = atomic_read(&coupled->ready_waiting_counts) & WAITING_MASK; in cpuidle_coupled_cpus_waiting()
291 int w = atomic_read(&coupled->ready_waiting_counts) & WAITING_MASK; in cpuidle_coupled_no_cpus_waiting()
/linux-4.4.14/drivers/gpu/drm/via/
Dvia_irq.c105 return atomic_read(&dev_priv->vbl_received); in via_get_vblank_counter()
121 if (!(atomic_read(&dev_priv->vbl_received) & 0x0F)) { in via_driver_irq_handler()
131 if (!(atomic_read(&dev_priv->vbl_received) & 0xFF)) { in via_driver_irq_handler()
246 cur_irq_sequence = atomic_read(&cur_irq->irq_received); in via_driver_irq_wait()
250 atomic_read(&cur_irq->irq_received)) - in via_driver_irq_wait()
370 atomic_read(&cur_irq->irq_received); in via_wait_irq()
/linux-4.4.14/drivers/net/wireless/cw1200/
Dbh.c141 (CW1200_BH_SUSPENDED == atomic_read(&priv->bh_suspend)), in cw1200_bh_suspend()
156 (CW1200_BH_RESUMED == atomic_read(&priv->bh_suspend)), in cw1200_bh_resume()
439 !atomic_read(&priv->recent_scan)) { in cw1200_bh()
453 (atomic_read(&priv->bh_rx) == 0) && in cw1200_bh()
454 (atomic_read(&priv->bh_tx) == 0)) in cw1200_bh()
464 0 : atomic_read(&priv->bh_suspend); in cw1200_bh()
514 !atomic_read(&priv->recent_scan)) { in cw1200_bh()
533 CW1200_BH_RESUME == atomic_read(&priv->bh_suspend)); in cw1200_bh()
Ddebug.c235 atomic_read(&priv->bh_term) ? "terminated" : "alive"); in cw1200_status_show()
237 atomic_read(&priv->bh_rx)); in cw1200_status_show()
239 atomic_read(&priv->bh_tx)); in cw1200_status_show()
263 atomic_read(&priv->tx_lock) ? "locked" : "unlocked"); in cw1200_status_show()
264 if (atomic_read(&priv->tx_lock)) in cw1200_status_show()
266 atomic_read(&priv->tx_lock)); in cw1200_status_show()
287 atomic_read(&priv->scan.in_progress) ? "active" : "idle"); in cw1200_status_show()
/linux-4.4.14/drivers/ras/
Ddebugfs.c9 return atomic_read(&trace_count); in ras_userspace_consumers()
15 return atomic_read(&trace_count); in trace_show()
/linux-4.4.14/net/nfc/nci/
Dcore.c710 if ((atomic_read(&ndev->state) == NCI_DISCOVERY) || in nci_start_poll()
711 (atomic_read(&ndev->state) == NCI_W4_ALL_DISCOVERIES)) { in nci_start_poll()
721 if ((atomic_read(&ndev->state) == NCI_W4_HOST_SELECT) || in nci_start_poll()
722 (atomic_read(&ndev->state) == NCI_POLL_ACTIVE)) { in nci_start_poll()
761 if ((atomic_read(&ndev->state) != NCI_DISCOVERY) && in nci_stop_poll()
762 (atomic_read(&ndev->state) != NCI_W4_ALL_DISCOVERIES)) { in nci_stop_poll()
782 if ((atomic_read(&ndev->state) != NCI_W4_HOST_SELECT) && in nci_activate_target()
783 (atomic_read(&ndev->state) != NCI_POLL_ACTIVE)) { in nci_activate_target()
811 if (atomic_read(&ndev->state) == NCI_W4_HOST_SELECT) { in nci_activate_target()
859 if (atomic_read(&ndev->state) == NCI_POLL_ACTIVE) { in nci_deactivate_target()
[all …]
/linux-4.4.14/drivers/edac/
Dedac_stub.c59 return atomic_read(&edac_handlers); in edac_handler_set()
87 if (atomic_read(&edac_subsys_valid)) in edac_get_sysfs_subsys()
/linux-4.4.14/drivers/media/usb/usbtv/
Dusbtv-audio.c66 if (atomic_read(&chip->snd_stream)) { in snd_usbtv_pcm_close()
131 if (!atomic_read(&chip->snd_stream)) in usbtv_audio_urb_received()
267 if (atomic_read(&usbtv->snd_stream) && usbtv->snd_bulk_urb) in usbtv_audio_suspend()
273 if (atomic_read(&usbtv->snd_stream) && usbtv->snd_bulk_urb) in usbtv_audio_resume()
281 if (atomic_read(&chip->snd_stream)) in snd_usbtv_trigger()
/linux-4.4.14/mm/
Dmmu_notifier.c252 BUG_ON(atomic_read(&mm->mm_users) <= 0); in do_mmu_notifier_register()
298 BUG_ON(atomic_read(&mm->mm_users) <= 0); in do_mmu_notifier_register()
351 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister()
384 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister()
404 BUG_ON(atomic_read(&mm->mm_count) <= 0); in mmu_notifier_unregister_no_release()
Ddebug.c86 page, atomic_read(&page->_count), page_mapcount(page), in dump_page_badflags()
206 mm->pgd, atomic_read(&mm->mm_users), in dump_mm()
207 atomic_read(&mm->mm_count), in dump_mm()
Dinternal.h65 VM_BUG_ON_PAGE(atomic_read(&page->_count), page); in set_page_refcounted()
83 VM_BUG_ON_PAGE(atomic_read(&compound_head(page)->_count) <= 0, page); in __get_page_tail_foll()
108 VM_BUG_ON_PAGE(atomic_read(&page->_count) <= 0, page); in get_page_foll()
/linux-4.4.14/sound/drivers/pcsp/
Dpcsp_lib.c30 if (atomic_read(&pcsp_chip.timer_active)) { in pcsp_call_pcm_elapsed()
134 if (!atomic_read(&chip->timer_active) || !chip->playback_substream) in pcsp_do_timer()
157 if (atomic_read(&chip->timer_active)) { in pcsp_start_playing()
178 if (!atomic_read(&chip->timer_active)) in pcsp_stop_playing()
317 if (atomic_read(&chip->timer_active)) { in snd_pcsp_playback_open()
/linux-4.4.14/include/linux/
Diocontext.h131 WARN_ON_ONCE(atomic_read(&ioc->active_ref) <= 0); in get_io_context_active()
140 WARN_ON_ONCE(atomic_read(&ioc->nr_tasks) <= 0); in ioc_task_link()
/linux-4.4.14/drivers/net/wireless/ath/carl9170/
Ddebug.c224 ar->fw.mem_blocks, atomic_read(&ar->mem_allocs)); in carl9170_debugfs_mem_usage_read()
227 atomic_read(&ar->mem_free_blocks), in carl9170_debugfs_mem_usage_read()
228 (atomic_read(&ar->mem_free_blocks) * ar->fw.mem_block_size) / 1024, in carl9170_debugfs_mem_usage_read()
675 atomic_read(&ar->pending_restarts)); in carl9170_debugfs_bug_read()
771 atomic_read(&ar->tx_anch_urbs));
773 atomic_read(&ar->rx_anch_urbs));
775 atomic_read(&ar->rx_work_urbs));
777 atomic_read(&ar->rx_pool_urbs));
780 atomic_read(&ar->tx_total_queued));
782 atomic_read(&ar->tx_ampdu_scheduler));
[all …]
/linux-4.4.14/drivers/media/pci/ivtv/
Divtv-firmware.c346 if (!res && !atomic_read(&itv->capturing) && in ivtv_firmware_check()
347 (!atomic_read(&itv->decoding) || in ivtv_firmware_check()
348 (atomic_read(&itv->decoding) < 2 && test_bit(IVTV_F_I_DEC_YUV, in ivtv_firmware_check()
377 if (res && !atomic_read(&itv->capturing) && in ivtv_firmware_check()
378 !atomic_read(&itv->decoding)) { in ivtv_firmware_check()
/linux-4.4.14/sound/firewire/fireworks/
Dfireworks_stream.c220 if ((atomic_read(&efw->playback_substreams) == 0) && in snd_efw_stream_start_duplex()
221 (atomic_read(&efw->capture_substreams) == 0)) in snd_efw_stream_start_duplex()
280 if (atomic_read(slave_substreams) > 0 && !amdtp_stream_running(slave)) { in snd_efw_stream_start_duplex()
312 if (atomic_read(slave_substreams) == 0) { in snd_efw_stream_stop_duplex()
315 if (atomic_read(master_substreams) == 0) in snd_efw_stream_stop_duplex()
/linux-4.4.14/arch/sh/include/asm/
Datomic.h17 #define atomic_read(v) READ_ONCE((v)->counter) macro
53 c = atomic_read(v); in __atomic_add_unless()
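The sh header above reduces atomic_read() to a READ_ONCE() of the counter, and __atomic_add_unless() reuses that snapshot inside a compare-and-swap retry loop. Below is a minimal userspace sketch of the same read-then-cmpxchg pattern, written with C11 atomics rather than the kernel primitives; the katomic_* names are invented for the example and are not kernel API.

/* Illustrative approximation of the kernel's "read, then cmpxchg until
 * stable" loop behind __atomic_add_unless(); build with: cc -std=c11 x.c
 */
#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } katomic_t;	/* stand-in for atomic_t */

static inline int katomic_read(katomic_t *v)
{
	/* roughly READ_ONCE((v)->counter): one untorn, uncached load */
	return atomic_load_explicit(&v->counter, memory_order_relaxed);
}

/* add @a to @v unless it currently holds @u; return the old value */
static int katomic_add_unless(katomic_t *v, int a, int u)
{
	int c = katomic_read(v);

	while (c != u &&
	       !atomic_compare_exchange_weak(&v->counter, &c, c + a))
		;	/* CAS failure refreshed c; retry with the new value */
	return c;
}

int main(void)
{
	katomic_t v;
	int old;

	atomic_init(&v.counter, 3);
	old = katomic_add_unless(&v, 2, 0);
	printf("old=%d new=%d\n", old, katomic_read(&v));	/* old=3 new=5 */
	return 0;
}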
/linux-4.4.14/drivers/infiniband/hw/cxgb3/
Diwch_cm.h58 ep, atomic_read(&((ep)->kref.refcount))); \
59 WARN_ON(atomic_read(&((ep)->kref.refcount)) < 1); \
65 ep, atomic_read(&((ep)->kref.refcount))); \
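The cxgb3 endpoint debug macros above only read the kref count to log it and to WARN_ON() an obviously bad value. A hedged userspace sketch of that log-and-check pattern follows; struct obj and obj_get()/obj_put() are invented for illustration, and the printed count is, as in the kernel macros, only a snapshot.

/* Illustrative sketch of the log-and-sanity-check refcount pattern;
 * obj_get()/obj_put() are invented names, not the driver's API.
 * Build with: cc -std=c11 x.c
 */
#include <assert.h>
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct obj {
	atomic_int refcount;
};

static void obj_get(struct obj *o)
{
	/* the count read here is only a snapshot of a moving value */
	fprintf(stderr, "get %p, refcount %d\n",
		(void *)o, atomic_load(&o->refcount));
	assert(atomic_load(&o->refcount) >= 1);		/* WARN_ON() stand-in */
	atomic_fetch_add(&o->refcount, 1);
}

static void obj_put(struct obj *o)
{
	fprintf(stderr, "put %p, refcount %d\n",
		(void *)o, atomic_load(&o->refcount));
	if (atomic_fetch_sub(&o->refcount, 1) == 1)
		free(o);			/* dropped the last reference */
}

int main(void)
{
	struct obj *o = malloc(sizeof(*o));

	if (!o)
		return 1;
	atomic_init(&o->refcount, 1);
	obj_get(o);
	obj_put(o);
	obj_put(o);	/* last put frees the object */
	return 0;
}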
/linux-4.4.14/fs/reiserfs/
Djournal.c741 if (atomic_read(&nr_reiserfs_jh) <= 0) in reiserfs_free_jh()
905 atomic_read(&other_jl->j_older_commits_done)) in flush_older_commits()
925 if (atomic_read(&other_jl->j_commit_left) != 0) { in flush_older_commits()
957 if (atomic_read(&j->j_async_throttle)) { in reiserfs_async_progress_wait()
989 if (atomic_read(&jl->j_older_commits_done)) { in flush_commit_list()
1021 if (atomic_read(&jl->j_commit_left) <= 0) { in flush_commit_list()
1102 BUG_ON(atomic_read(&jl->j_commit_left) != 1); in flush_commit_list()
1364 if (atomic_read(&journal->j_wcount) != 0) { in flush_journal_list()
1366 atomic_read(&journal->j_wcount)); in flush_journal_list()
1384 if (atomic_read(&jl->j_nonzerolen) <= 0 && in flush_journal_list()
[all …]
/linux-4.4.14/kernel/locking/
Dqrwlock.c118 if (!atomic_read(&lock->cnts) && in queued_write_lock_slowpath()
138 cnts = atomic_read(&lock->cnts); in queued_write_lock_slowpath()
/linux-4.4.14/drivers/net/wireless/rsi/
Drsi_common.h41 (atomic_read(&event->event_condition) == 0)); in rsi_wait_event()
44 (atomic_read(&event->event_condition) == 0), in rsi_wait_event()
/linux-4.4.14/include/trace/events/
Dsock.h23 __entry->rmem_alloc = atomic_read(&sk->sk_rmem_alloc);
51 __entry->rmem_alloc = atomic_read(&sk->sk_rmem_alloc);
Drpm.h37 __entry->usage_count = atomic_read(
43 __entry->child_count = atomic_read(
/linux-4.4.14/drivers/net/irda/
Dvlsi_ir.h684 t = atomic_read(&r->tail) & r->mask; in ring_last()
685 return (((t+1) & r->mask) == (atomic_read(&r->head) & r->mask)) ? NULL : &r->rd[t]; in ring_last()
698 h = atomic_read(&r->head) & r->mask; in ring_first()
699 return (h == (atomic_read(&r->tail) & r->mask)) ? NULL : &r->rd[h]; in ring_first()
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb3/
Dl2t.c222 if (!atomic_read(&d->nfree)) in alloc_l2e()
227 if (atomic_read(&e->refcnt) == 0) in alloc_l2e()
230 for (e = &d->l2tab[1]; atomic_read(&e->refcnt); ++e) ; in alloc_l2e()
266 if (atomic_read(&e->refcnt) == 0) { /* hasn't been recycled */ in t3_l2e_free()
337 if (atomic_read(&e->refcnt) == 1) in t3_l2t_get()
420 if (atomic_read(&e->refcnt)) { in t3_l2t_update()
/linux-4.4.14/drivers/gpu/host1x/
Dsyncpt.h98 min = atomic_read(&sp->min_val); in host1x_syncpt_idle()
99 max = atomic_read(&sp->max_val); in host1x_syncpt_idle()
Dsyncpt.c283 current_val = (u32)atomic_read(&sp->min_val); in host1x_syncpt_is_expired()
284 future_val = (u32)atomic_read(&sp->max_val); in host1x_syncpt_is_expired()
414 return (u32)atomic_read(&sp->max_val); in host1x_syncpt_read_max()
424 return (u32)atomic_read(&sp->min_val); in host1x_syncpt_read_min()
/linux-4.4.14/drivers/media/v4l2-core/
Dvideobuf2-memops.c95 __func__, h, atomic_read(h->refcount), vma->vm_start, in vb2_common_vm_open()
113 __func__, h, atomic_read(h->refcount), vma->vm_start, in vb2_common_vm_close()
/linux-4.4.14/block/
Dblk-mq-tag.c53 int old = atomic_read(index); in bt_index_atomic_inc()
83 wake_index = atomic_read(&bt->wake_index); in blk_mq_tag_wakeup_all()
136 users = atomic_read(&hctx->tags->active_queues); in hctx_may_queue()
144 return atomic_read(&hctx->nr_active) < depth; in hctx_may_queue()
252 wait_index = atomic_read(&hctx->wait_index); in bt_wait_ptr()
362 wake_index = atomic_read(&bt->wake_index); in bt_wake_ptr()
367 int o = atomic_read(&bt->wake_index); in bt_wake_ptr()
719 page += sprintf(page, "active_queues=%u\n", atomic_read(&tags->active_queues)); in blk_mq_tag_sysfs_show()
/linux-4.4.14/include/net/
Drequest_sock.h112 WARN_ON_ONCE(atomic_read(&req->rsk_refcnt) != 0); in reqsk_free()
222 return atomic_read(&queue->qlen); in reqsk_queue_len()
227 return atomic_read(&queue->young); in reqsk_queue_len_young()
/linux-4.4.14/arch/sparc/kernel/
Dnmi.c56 if (atomic_read(&nmi_active)) { in touch_nmi_watchdog()
166 if (!atomic_read(&nmi_active)) in check_nmi_watchdog()
191 if (!atomic_read(&nmi_active)) { in check_nmi_watchdog()
/linux-4.4.14/fs/pstore/
Dram_core.c42 return atomic_read(&prz->buffer->size); in buffer_size()
47 return atomic_read(&prz->buffer->start); in buffer_start()
57 old = atomic_read(&prz->buffer->start); in buffer_start_add_atomic()
72 if (atomic_read(&prz->buffer->size) == prz->buffer_size) in buffer_size_add_atomic()
76 old = atomic_read(&prz->buffer->size); in buffer_size_add_atomic()
94 old = atomic_read(&prz->buffer->start); in buffer_start_add_locked()
114 old = atomic_read(&prz->buffer->size); in buffer_size_add_locked()
/linux-4.4.14/drivers/media/pci/cx18/
Dcx18-streams.c672 if ((atomic_read(&s->q_free.depth) + atomic_read(&s->q_busy.depth)) >= in cx18_stream_rotate_idx_mdls()
677 if (atomic_read(&s->q_full.depth) < 2) in cx18_stream_rotate_idx_mdls()
719 if (atomic_read(&s->q_free.depth) == 0 || in _cx18_stream_load_fw_queue()
720 atomic_read(&s->q_busy.depth) >= CX18_MAX_FW_MDLS_PER_STREAM) in _cx18_stream_load_fw_queue()
729 } while (atomic_read(&s->q_busy.depth) < CX18_MAX_FW_MDLS_PER_STREAM in _cx18_stream_load_fw_queue()
866 if (atomic_read(&cx->ana_capturing) == 0) in cx18_start_v4l2_encode_stream()
918 if (atomic_read(&cx->tot_capturing) == 0) { in cx18_start_v4l2_encode_stream()
947 if (atomic_read(&cx->tot_capturing) == 0) { in cx18_start_v4l2_encode_stream()
988 if (atomic_read(&cx->tot_capturing) == 0) in cx18_stop_v4l2_encode_stream()
1015 if (atomic_read(&cx->tot_capturing) > 0) in cx18_stop_v4l2_encode_stream()
Dcx18-fileops.c247 if (!atomic_read(&s->q_full.depth)) in cx18_get_mdl()
427 if (atomic_read(&cx->ana_capturing) == 0 && s->id == -1) { in cx18_read()
654 if (atomic_read(&s->q_full.depth)) in cx18_v4l2_enc_poll()
775 if (atomic_read(&cx->ana_capturing) > 0) { in cx18_v4l2_close()
821 if (atomic_read(&cx->ana_capturing) > 0) { in cx18_serialized_open()
867 if (atomic_read(&cx->ana_capturing)) { in cx18_mute()
880 if (atomic_read(&cx->ana_capturing)) { in cx18_unmute()
/linux-4.4.14/drivers/misc/sgi-xp/
Dxpc_main.c350 atomic_read(&part->nchannels_active) > 0 || in xpc_channel_mgr()
370 (atomic_read(&part->channel_mgr_requests) > 0 || in xpc_channel_mgr()
373 atomic_read(&part->nchannels_active) == 0 && in xpc_channel_mgr()
501 DBUG_ON(atomic_read(&part->nchannels_engaged) != 0); in xpc_teardown_ch_structures()
502 DBUG_ON(atomic_read(&part->nchannels_active) != 0); in xpc_teardown_ch_structures()
512 wait_event(part->teardown_wq, (atomic_read(&part->references) == 0)); in xpc_teardown_ch_structures()
619 int idle = atomic_read(&ch->kthreads_idle); in xpc_activate_kthreads()
620 int assigned = atomic_read(&ch->kthreads_assigned); in xpc_activate_kthreads()
836 if (atomic_read(&ch->kthreads_assigned) < in xpc_create_kthreads()
Dxpc_channel.c94 if (atomic_read(&ch->kthreads_assigned) > 0 || in xpc_process_disconnect()
95 atomic_read(&ch->references) > 0) { in xpc_process_disconnect()
123 if (atomic_read(&ch->n_to_notify) > 0) { in xpc_process_disconnect()
136 DBUG_ON(atomic_read(&ch->n_to_notify) != 0); in xpc_process_disconnect()
493 DBUG_ON(atomic_read(&ch->kthreads_assigned) != 0); in xpc_connect_channel()
494 DBUG_ON(atomic_read(&ch->kthreads_idle) != 0); in xpc_connect_channel()
495 DBUG_ON(atomic_read(&ch->kthreads_active) != 0); in xpc_connect_channel()
787 if (atomic_read(&ch->kthreads_idle) > 0) { in xpc_disconnect_channel()
797 if (atomic_read(&ch->n_on_msg_allocate_wq) > 0) in xpc_disconnect_channel()
/linux-4.4.14/drivers/staging/lustre/lustre/lov/
Dlov_request.c79 int completes = atomic_read(&set->set_completes); in lov_set_finished()
202 if (!atomic_read(&set->set_success)) in common_attr_done()
251 if (atomic_read(&set->set_completes)) in lov_fini_getattr_set()
342 if (atomic_read(&set->set_completes)) { in lov_fini_destroy_set()
424 if (atomic_read(&set->set_completes)) { in lov_fini_setattr_set()
586 if (atomic_read(&set->set_completes)) { in lov_fini_statfs_set()
588 atomic_read(&set->set_success)); in lov_fini_statfs_set()
667 success = atomic_read(&set->set_success); in cb_statfs_update()
694 atomic_read(&set->set_success)); in cb_statfs_update()
Dlov_object.c410 lsm->lsm_magic, atomic_read(&lsm->lsm_refc), in lov_print_raid0()
434 lsm->lsm_magic, atomic_read(&lsm->lsm_refc), in lov_print_released()
641 while (atomic_read(&lov->lo_active_ios) > 0) { in lov_layout_wait()
644 atomic_read(&lov->lo_active_ios)); in lov_layout_wait()
647 atomic_read(&lov->lo_active_ios) == 0, &lwi); in lov_layout_wait()
691 LASSERT(atomic_read(&lov->lo_active_ios) == 0); in lov_layout_change()
760 atomic_read(&lov->lo_active_ios) > 0) { in lov_conf_set()
783 if (atomic_read(&lov->lo_active_ios) > 0) { in lov_conf_set()
919 lsm, atomic_read(&lsm->lsm_refc), in lov_lsm_addref()
/linux-4.4.14/drivers/target/iscsi/
Discsi_target_erl0.c763 if (atomic_read(&sess->session_reinstatement)) { in iscsit_handle_time2retain_timeout()
851 if (atomic_read(&conn->connection_exit)) { in iscsit_connection_reinstatement_rcfr()
856 if (atomic_read(&conn->transport_failed)) { in iscsit_connection_reinstatement_rcfr()
875 if (atomic_read(&conn->connection_exit)) { in iscsit_cause_connection_reinstatement()
880 if (atomic_read(&conn->transport_failed)) { in iscsit_cause_connection_reinstatement()
885 if (atomic_read(&conn->connection_reinstatement)) { in iscsit_cause_connection_reinstatement()
922 !atomic_read(&sess->session_reinstatement) && in iscsit_handle_connection_cleanup()
923 !atomic_read(&sess->session_fall_back_to_erl0)) in iscsit_handle_connection_cleanup()
936 if (atomic_read(&conn->connection_exit)) { in iscsit_take_action_for_connection_exit()
Discsi_target.c2547 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_send_conn_drop_async_message()
2619 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_datain_pdu()
2830 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_logout_rsp()
2893 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_nopin_rsp()
3040 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_send_r2t()
3193 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_rsp_pdu()
3312 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_task_mgt_rsp()
3567 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_text_rsp()
3645 hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn)); in iscsit_build_reject()
3837 if (atomic_read(&conn->check_immediate_queue)) in iscsit_response_queue()
[all …]
/linux-4.4.14/drivers/usb/misc/
Diowarrior.c171 intr_idx = atomic_read(&dev->intr_idx); in iowarrior_callback()
174 read_idx = atomic_read(&dev->read_idx); in iowarrior_callback()
262 read_idx = atomic_read(&dev->read_idx); in read_index()
263 intr_idx = atomic_read(&dev->intr_idx); in read_index()
326 } while (atomic_read(&dev->overflow_flag)); in iowarrior_read()
387 if (atomic_read(&dev->write_busy) == MAX_WRITES_IN_FLIGHT) { in iowarrior_write()
394 (!dev->present || (atomic_read (&dev-> write_busy) < MAX_WRITES_IN_FLIGHT))); in iowarrior_write()
443 retval, atomic_read(&dev->write_busy)); in iowarrior_write()
707 if (atomic_read(&dev->write_busy) < MAX_WRITES_IN_FLIGHT) in iowarrior_poll()
/linux-4.4.14/drivers/w1/
Dw1_family.c89 while (atomic_read(&fent->refcnt)) { in w1_unregister_family()
91 fent->fid, atomic_read(&fent->refcnt)); in w1_unregister_family()
/linux-4.4.14/drivers/media/platform/s5p-mfc/
Ds5p_mfc_pm.c104 mfc_debug(3, "+ %d\n", atomic_read(&clk_ref)); in s5p_mfc_clock_on()
114 mfc_debug(3, "- %d\n", atomic_read(&clk_ref)); in s5p_mfc_clock_off()
/linux-4.4.14/arch/mn10300/include/asm/
Datomic.h37 #define atomic_read(v) READ_ONCE((v)->counter) macro
125 c = atomic_read(v); \
/linux-4.4.14/drivers/gpu/drm/mga/
Dmga_irq.c46 return atomic_read(&dev_priv->vbl_received); in mga_get_vblank_counter()
132 (((cur_fence = atomic_read(&dev_priv->last_fence_retired)) in mga_driver_fence_wait()
/linux-4.4.14/drivers/net/ethernet/cisco/enic/
Dvnic_rq.h227 WARN_ON(atomic_read(&rq->bpoll_state) != ENIC_POLL_STATE_NAPI); in enic_poll_unlock_napi()
243 WARN_ON(atomic_read(&rq->bpoll_state) != ENIC_POLL_STATE_POLL); in enic_poll_unlock_poll()
249 return atomic_read(&rq->bpoll_state) & ENIC_POLL_STATE_POLL; in enic_poll_busy_polling()
/linux-4.4.14/drivers/target/
Dtarget_core_ua.c58 if (!atomic_read(&deve->ua_count)) { in target_scsi3_ua_check()
230 if (!atomic_read(&deve->ua_count)) { in core_scsi3_ua_for_check_condition()
302 if (!atomic_read(&deve->ua_count)) { in core_scsi3_ua_clear_for_request_sense()
/linux-4.4.14/fs/ocfs2/
Djournal.c318 flushed = atomic_read(&journal->j_num_trans); in ocfs2_commit_cache()
336 flushed = atomic_read(&journal->j_num_trans); in ocfs2_commit_cache()
990 num_running_trans = atomic_read(&(osb->journal->j_num_trans)); in ocfs2_journal_shutdown()
1009 BUG_ON(atomic_read(&(osb->journal->j_num_trans)) != 0); in ocfs2_journal_shutdown()
1922 if (atomic_read(&os->os_state) == ORPHAN_SCAN_INACTIVE) in ocfs2_queue_orphan_scan()
1926 atomic_read(&os->os_state)); in ocfs2_queue_orphan_scan()
1936 if (atomic_read(&os->os_state) == ORPHAN_SCAN_INACTIVE) in ocfs2_queue_orphan_scan()
1958 atomic_read(&os->os_state)); in ocfs2_queue_orphan_scan()
1974 if (atomic_read(&os->os_state) == ORPHAN_SCAN_ACTIVE) in ocfs2_orphan_scan_work()
1985 if (atomic_read(&os->os_state) == ORPHAN_SCAN_ACTIVE) { in ocfs2_orphan_scan_stop()
[all …]
/linux-4.4.14/arch/parisc/kernel/
Dftrace.c67 trace->overrun = atomic_read(&current->trace_overrun); in pop_return_trace()
118 if (unlikely(atomic_read(&current->tracing_graph_pause))) in prepare_ftrace_return()
/linux-4.4.14/security/keys/
Dproc.c255 atomic_read(&key->usage), in proc_keys_show()
343 atomic_read(&user->usage), in proc_key_users_show()
344 atomic_read(&user->nkeys), in proc_key_users_show()
345 atomic_read(&user->nikeys), in proc_key_users_show()
/linux-4.4.14/drivers/crypto/caam/
Dcaamrng.c146 if (atomic_read(&bd->empty)) { in caam_read()
148 if (atomic_read(&bd->empty) == BUF_EMPTY) { in caam_read()
159 if (atomic_read(&bd->empty)) in caam_read()
252 if (atomic_read(&bd->empty) == BUF_PENDING) in caam_cleanup()
/linux-4.4.14/sound/soc/fsl/
Dimx-pcm-fiq.c56 if (!atomic_read(&iprtd->playing) && !atomic_read(&iprtd->capturing)) in snd_hrtimer_callback()
137 if (!atomic_read(&iprtd->playing) && in snd_imx_pcm_trigger()
138 !atomic_read(&iprtd->capturing)) in snd_imx_pcm_trigger()
/linux-4.4.14/drivers/staging/lustre/lnet/selftest/
Dselftest.h243 atomic_read(&(rpc)->crpc_refcount)); \
244 LASSERT(atomic_read(&(rpc)->crpc_refcount) > 0); \
252 atomic_read(&(rpc)->crpc_refcount)); \
253 LASSERT(atomic_read(&(rpc)->crpc_refcount) > 0); \
509 LASSERT(atomic_read(&rpc->crpc_refcount) == 0); in srpc_destroy_client_rpc()
/linux-4.4.14/drivers/base/
Ddd.c281 int local_trigger_count = atomic_read(&deferred_trigger_count); in really_probe()
350 if (local_trigger_count != atomic_read(&deferred_trigger_count)) in really_probe()
384 atomic_read(&probe_count)); in driver_probe_done()
385 if (atomic_read(&probe_count)) in driver_probe_done()
397 wait_event(probe_waitqueue, atomic_read(&probe_count) == 0); in wait_for_device_probe()
/linux-4.4.14/drivers/infiniband/core/
Dfmr_pool.c182 if (atomic_read(&pool->flush_ser) - atomic_read(&pool->req_ser) < 0) { in ib_fmr_cleanup_thread()
193 if (atomic_read(&pool->flush_ser) - atomic_read(&pool->req_ser) >= 0 && in ib_fmr_cleanup_thread()
418 atomic_read(&pool->flush_ser) - serial >= 0)) in ib_flush_fmr_pool()
/linux-4.4.14/drivers/infiniband/hw/nes/
Dnes_nic.c1277 target_stat_values[++index] = atomic_read(&cm_connects); in nes_netdev_get_ethtool_stats()
1278 target_stat_values[++index] = atomic_read(&cm_accepts); in nes_netdev_get_ethtool_stats()
1279 target_stat_values[++index] = atomic_read(&cm_disconnects); in nes_netdev_get_ethtool_stats()
1280 target_stat_values[++index] = atomic_read(&cm_connecteds); in nes_netdev_get_ethtool_stats()
1281 target_stat_values[++index] = atomic_read(&cm_connect_reqs); in nes_netdev_get_ethtool_stats()
1282 target_stat_values[++index] = atomic_read(&cm_rejects); in nes_netdev_get_ethtool_stats()
1283 target_stat_values[++index] = atomic_read(&mod_qp_timouts); in nes_netdev_get_ethtool_stats()
1284 target_stat_values[++index] = atomic_read(&qps_created); in nes_netdev_get_ethtool_stats()
1285 target_stat_values[++index] = atomic_read(&sw_qps_destroyed); in nes_netdev_get_ethtool_stats()
1286 target_stat_values[++index] = atomic_read(&qps_destroyed); in nes_netdev_get_ethtool_stats()
[all …]
/linux-4.4.14/arch/arm/kernel/
Dmachine_kexec.c126 while ((atomic_read(&waiting_for_crash_ipi) > 0) && msecs) { in machine_crash_shutdown()
130 if (atomic_read(&waiting_for_crash_ipi) > 0) in machine_crash_shutdown()
/linux-4.4.14/arch/x86/include/asm/
Datomic.h25 static __always_inline int atomic_read(const atomic_t *v) in atomic_read() function
212 c = atomic_read(v); in ATOMIC_OP()
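On x86, atomic_read() above is just a READ_ONCE()-style load of the counter, and many call sites in this listing poll such a counter in a wait loop. A small userspace sketch of that polling pattern, using C11 atomics in place of atomic_t and a busy-wait where the kernel would normally sleep on a waitqueue:

/* Minimal sketch of the wait-until-zero pattern; C11 atomics stand in
 * for atomic_t, and the busy-wait replaces the kernel's waitqueues.
 * Build with: cc -std=c11 -pthread x.c
 */
#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

static atomic_int pending = 3;

static void *worker(void *arg)
{
	(void)arg;
	for (int i = 0; i < 3; i++)
		atomic_fetch_sub_explicit(&pending, 1, memory_order_release);
	return NULL;
}

int main(void)
{
	pthread_t t;

	pthread_create(&t, NULL, worker, NULL);

	/* like "while (atomic_read(&pending)) ;": the explicit atomic load
	 * forces a fresh read every iteration instead of a value the
	 * compiler cached in a register */
	while (atomic_load_explicit(&pending, memory_order_acquire))
		;

	pthread_join(t, NULL);
	printf("pending drained\n");
	return 0;
}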
/linux-4.4.14/drivers/oprofile/
Devent_buffer.c170 wait_event_interruptible(buffer_wait, atomic_read(&buffer_ready)); in event_buffer_read()
176 if (!atomic_read(&buffer_ready)) in event_buffer_read()
/linux-4.4.14/arch/xtensa/include/asm/
Datomic.h50 #define atomic_read(v) READ_ONCE((v)->counter) macro
245 c = atomic_read(v); in ATOMIC_OPS()
/linux-4.4.14/drivers/crypto/qat/qat_common/
Dqat_crypto.c76 for (i = 0; i < atomic_read(&inst->refctr); i++) in qat_crypto_free_instances()
114 ctr = atomic_read(&tmp_dev->ref_count); in qat_crypto_get_instance_node()
146 ctr = atomic_read(&tmp_inst->refctr); in qat_crypto_get_instance_node()
/linux-4.4.14/drivers/scsi/
Dsg.c244 (atomic_read(&sdp->detaching) || in open_wait()
250 if (atomic_read(&sdp->detaching)) in open_wait()
257 (atomic_read(&sdp->detaching) || in open_wait()
263 if (atomic_read(&sdp->detaching)) in open_wait()
446 if (atomic_read(&sdp->detaching)) { in sg_read()
455 (atomic_read(&sdp->detaching) || in sg_read()
457 if (atomic_read(&sdp->detaching)) { in sg_read()
599 if (atomic_read(&sdp->detaching)) in sg_write()
790 if (atomic_read(&sdp->detaching)) { in sg_common_write()
857 if (atomic_read(&sdp->detaching)) in sg_ioctl()
[all …]
/linux-4.4.14/fs/jbd2/
Dcommit.c72 if (atomic_read(&bh->b_count) != 1) in release_buffer_page()
452 while (atomic_read(&commit_transaction->t_updates)) { in jbd2_journal_commit_transaction()
457 if (atomic_read(&commit_transaction->t_updates)) { in jbd2_journal_commit_transaction()
468 J_ASSERT (atomic_read(&commit_transaction->t_outstanding_credits) <= in jbd2_journal_commit_transaction()
530 atomic_sub(atomic_read(&journal->j_reserved_credits), in jbd2_journal_commit_transaction()
576 atomic_read(&commit_transaction->t_outstanding_credits); in jbd2_journal_commit_transaction()
580 atomic_read(&commit_transaction->t_outstanding_credits)); in jbd2_journal_commit_transaction()
837 J_ASSERT_BH(bh, atomic_read(&bh->b_count) == 0); in jbd2_journal_commit_transaction()
1099 atomic_read(&commit_transaction->t_handle_count); in jbd2_journal_commit_transaction()
/linux-4.4.14/drivers/staging/android/
Dsync.c330 int err = atomic_read(&fence->status); in sync_fence_wait_async()
343 err = atomic_read(&fence->status); in sync_fence_wait_async()
385 atomic_read(&fence->status) <= 0, in sync_fence_wait()
400 ret = atomic_read(&fence->status); in sync_fence_wait()
522 int i, status = atomic_read(&fence->status); in sync_fence_free()
551 status = atomic_read(&fence->status); in sync_fence_poll()
673 data->status = atomic_read(&fence->status); in sync_fence_ioctl_fence_info()
/linux-4.4.14/net/rds/
Dib_rdma.c285 WARN_ON(atomic_read(&pool->item_count)); in rds_ib_destroy_mr_pool()
286 WARN_ON(atomic_read(&pool->free_pinned)); in rds_ib_destroy_mr_pool()
332 if (atomic_read(&pool->dirty_count) >= pool->max_items / 10) in rds_ib_alloc_fmr()
336 if (atomic_read(&pool->dirty_count) >= pool->max_items * 9 / 10) { in rds_ib_alloc_fmr()
568 item_count = atomic_read(&pool->item_count); in rds_ib_flush_goal()
774 if (atomic_read(&pool->free_pinned) >= pool->max_free_pinned || in rds_ib_free_mr()
775 atomic_read(&pool->dirty_count) >= pool->max_items / 5) in rds_ib_free_mr()
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb4/
Dl2t.c278 if (!atomic_read(&d->nfree)) in alloc_l2e()
283 if (atomic_read(&e->refcnt) == 0) in alloc_l2e()
286 for (e = d->l2tab; atomic_read(&e->refcnt); ++e) in alloc_l2e()
316 if (atomic_read(&e->refcnt) == 0) { /* hasn't been recycled */ in t4_l2e_free()
391 if (atomic_read(&e->refcnt) == 1) in cxgb4_l2t_get()
493 if (atomic_read(&e->refcnt)) in t4_l2t_update()
647 l2e_state(e), atomic_read(&e->refcnt), in l2t_seq_show()
/linux-4.4.14/fs/f2fs/
Ddebug.c42 si->ext_node = atomic_read(&sbi->total_ext_node); in update_general_status()
55 si->inline_xattr = atomic_read(&sbi->inline_xattr); in update_general_status()
56 si->inline_inode = atomic_read(&sbi->inline_inode); in update_general_status()
57 si->inline_dir = atomic_read(&sbi->inline_dir); in update_general_status()
92 si->inplace_count = atomic_read(&sbi->inplace_count); in update_general_status()
196 si->cache_mem += atomic_read(&sbi->total_ext_node) * in update_mem_info()
/linux-4.4.14/security/tomoyo/
Dgc.c439 if (atomic_read(&container_of in tomoyo_try_to_gc()
522 if (!domain->is_deleted || atomic_read(&domain->users)) in tomoyo_collect_entry()
538 if (atomic_read(&ptr->users) > 0) in tomoyo_collect_entry()
563 atomic_read(&group->head.users) > 0) in tomoyo_collect_entry()
577 if (atomic_read(&ptr->users) > 0) in tomoyo_collect_entry()
/linux-4.4.14/fs/nfsd/
Dnfscache.c239 if (atomic_read(&num_drc_entries) <= max_drc_entries && in prune_bucket()
273 return atomic_read(&num_drc_entries); in nfsd_reply_cache_count()
362 longest_chain_cachesize = atomic_read(&num_drc_entries); in nfsd_cache_search()
367 atomic_read(&num_drc_entries)); in nfsd_cache_search()
594 atomic_read(&num_drc_entries)); in nfsd_reply_cache_stats_show()
/linux-4.4.14/sound/core/
Dhrtimer.c50 if (!atomic_read(&stime->running)) in snd_hrtimer_callback()
56 if (!atomic_read(&stime->running)) in snd_hrtimer_callback()
/linux-4.4.14/drivers/s390/net/
Dfsm.h146 int state = atomic_read(&fi->state); in fsm_event()
214 return atomic_read(&fi->state); in fsm_getstate()
/linux-4.4.14/arch/powerpc/mm/
Dicswx.c113 if (atomic_read(&mm->mm_users) > 1) in use_cop()
153 if (atomic_read(&mm->mm_users) > 1) in drop_cop()
/linux-4.4.14/arch/avr32/include/asm/
Datomic.h22 #define atomic_read(v) READ_ONCE((v)->counter) macro
116 int tmp, old = atomic_read(v); in __atomic_add_unless()
/linux-4.4.14/drivers/s390/cio/
Dcrw.c69 atomic_read(&crw_nr_req) > 0); in crw_collect_info()
144 wait_event(crw_handler_wait_q, atomic_read(&crw_nr_req) == 0); in crw_wait_for_channel_report()
/linux-4.4.14/arch/m32r/include/asm/
Datomic.h31 #define atomic_read(v) READ_ONCE((v)->counter) macro
234 c = atomic_read(v); in __atomic_add_unless()
/linux-4.4.14/drivers/media/usb/tm6000/
Dtm6000-alsa.c201 if (atomic_read(&core->stream_started) > 0) { in snd_tm6000_close()
218 if (atomic_read(&core->stream_started) == 0) in tm6000_fillbuf()
304 if (atomic_read(&core->stream_started) > 0) { in snd_tm6000_hw_free()
336 if (atomic_read(&core->stream_started)) { in audio_trigger()
/linux-4.4.14/fs/proc/
Dtask_nommu.c39 if (atomic_read(&mm->mm_count) > 1 || in task_mem()
49 if (atomic_read(&mm->mm_count) > 1) in task_mem()
59 if (current->files && atomic_read(&current->files->count) > 1) in task_mem()
64 if (current->sighand && atomic_read(&current->sighand->count) > 1) in task_mem()
/linux-4.4.14/arch/ia64/include/asm/
Datomic.h24 #define atomic_read(v) READ_ONCE((v)->counter) macro
39 old = atomic_read(v); \
144 c = atomic_read(v); in __atomic_add_unless()
/linux-4.4.14/drivers/misc/sgi-gru/
Dgrutlbpurge.c232 start, end, atomic_read(&gms->ms_range_active)); in gru_invalidate_range_start()
322 atomic_read(&gms->ms_refcnt)); in gru_register_mmu_notifier()
332 atomic_read(&gms->ms_refcnt), gms->ms_released); in gru_drop_mmu_notifier()
/linux-4.4.14/security/integrity/ima/
Dima_main.c91 if (atomic_read(&inode->i_readcount) && IS_IMA(inode)) { in ima_rdwr_violation_check()
99 if ((atomic_read(&inode->i_writecount) > 0) && must_measure) in ima_rdwr_violation_check()
125 if (atomic_read(&inode->i_writecount) == 1) { in ima_check_last_writer()
/linux-4.4.14/kernel/rcu/
Drcutorture.c1068 if (atomic_read(&beenhere)) in rcutorture_trace_dump()
1253 atomic_read(&n_rcu_torture_alloc), in rcu_torture_stats_print()
1254 atomic_read(&n_rcu_torture_alloc_fail), in rcu_torture_stats_print()
1255 atomic_read(&n_rcu_torture_free)); in rcu_torture_stats_print()
1257 atomic_read(&n_rcu_torture_mberror), in rcu_torture_stats_print()
1272 if (atomic_read(&n_rcu_torture_mberror) != 0 || in rcu_torture_stats_print()
1296 pr_cont(" %d", atomic_read(&rcu_torture_wcount[i])); in rcu_torture_stats_print()
1495 atomic_read(&barrier_cbs_count) == 0 || in rcu_torture_barrier()
1501 if (atomic_read(&barrier_cbs_invoked) != n_barrier_cbs) { in rcu_torture_barrier()
1504 atomic_read(&barrier_cbs_invoked), in rcu_torture_barrier()
[all …]
/linux-4.4.14/net/sunrpc/xprtrdma/
Dsvc_rdma_transport.c306 if (atomic_read(&xprt->sc_xprt.xpt_ref.refcount)==0) in rq_comp_handler()
488 if (atomic_read(&xprt->sc_xprt.xpt_ref.refcount)==0) in sq_comp_handler()
1166 if (atomic_read(&rdma->sc_xprt.xpt_ref.refcount) != 0) in __svc_rdma_free()
1168 atomic_read(&rdma->sc_xprt.xpt_ref.refcount)); in __svc_rdma_free()
1196 if (atomic_read(&rdma->sc_ctxt_used) != 0) in __svc_rdma_free()
1198 atomic_read(&rdma->sc_ctxt_used)); in __svc_rdma_free()
1199 if (atomic_read(&rdma->sc_dma_used) != 0) in __svc_rdma_free()
1201 atomic_read(&rdma->sc_dma_used)); in __svc_rdma_free()
1274 if (xprt->sc_sq_depth < atomic_read(&xprt->sc_sq_count) + wr_count) { in svc_rdma_send()
1283 atomic_read(&xprt->sc_sq_count) < in svc_rdma_send()
[all …]
/linux-4.4.14/fs/configfs/
Dconfigfs_internal.h151 WARN_ON(!atomic_read(&sd->s_count)); in configfs_get()
159 WARN_ON(!atomic_read(&sd->s_count)); in configfs_put()
/linux-4.4.14/fs/kernfs/
Ddir.c30 return atomic_read(&kn->active) >= 0; in kernfs_active()
379 if (atomic_read(&kn->active) != KN_DEACTIVATED_BIAS) in kernfs_drain()
385 atomic_read(&kn->active) == KN_DEACTIVATED_BIAS); in kernfs_drain()
404 WARN_ON(!atomic_read(&kn->count)); in kernfs_get()
431 WARN_ONCE(atomic_read(&kn->active) != KN_DEACTIVATED_BIAS, in kernfs_put()
433 parent ? parent->name : "", kn->name, atomic_read(&kn->active)); in kernfs_put()
1041 WARN_ON_ONCE(atomic_read(&pos->active) != KN_DEACTIVATED_BIAS); in kernfs_activate()
1093 WARN_ON_ONCE(atomic_read(&kn->active) != KN_DEACTIVATED_BIAS); in __kernfs_remove()
1238 atomic_read(&kn->active) == KN_DEACTIVATED_BIAS) in kernfs_remove_self()
/linux-4.4.14/net/ipv4/
Dinet_timewait_sock.c162 if (atomic_read(&dr->tw_count) >= dr->sysctl_max_tw_buckets) in inet_twsk_alloc()
278 atomic_read(&twsk_net(tw)->count)) in inet_twsk_purge()
285 atomic_read(&twsk_net(tw)->count))) { in inet_twsk_purge()
/linux-4.4.14/fs/notify/
Dinode_mark.c176 if (!atomic_read(&inode->i_count)) { in fsnotify_unmount_inodes()
195 atomic_read(&next_i->i_count)) { in fsnotify_unmount_inodes()
/linux-4.4.14/arch/blackfin/kernel/
Dnmi.c151 if (!atomic_read(&nmi_touched[this_cpu])) in check_nmi_wdt_touched()
160 if (!atomic_read(&nmi_touched[cpu])) in check_nmi_wdt_touched()
/linux-4.4.14/arch/sparc/include/asm/
Datomic_64.h17 #define atomic_read(v) READ_ONCE((v)->counter) macro
82 c = atomic_read(v); in ATOMIC_OPS()
/linux-4.4.14/drivers/misc/ibmasm/
Dcommand.c61 dbg("command count: %d\n", atomic_read(&command_count)); in ibmasm_new_command()
72 dbg("command count: %d\n", atomic_read(&command_count)); in ibmasm_free_command()
/linux-4.4.14/kernel/debug/kdb/
Dkdb_debugger.c39 kdb_initial_cpu = atomic_read(&kgdb_active); in kdb_common_init_state()
69 if (atomic_read(&kgdb_setting_breakpoint)) in kdb_stub()
/linux-4.4.14/arch/arc/include/asm/
Datomic.h20 #define atomic_read(v) READ_ONCE((v)->counter) macro
209 c = atomic_read(v); \
/linux-4.4.14/arch/m68k/kernel/
Dirq.c37 seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count)); in arch_show_interrupts()
/linux-4.4.14/net/bluetooth/hidp/
Dcore.c106 if (atomic_read(&session->terminate)) in hidp_send_message()
238 if (atomic_read(&session->terminate)) in hidp_get_raw_report()
270 !atomic_read(&session->terminate)) { in hidp_get_raw_report()
275 || atomic_read(&session->terminate), in hidp_get_raw_report()
345 !atomic_read(&session->terminate)) { in hidp_set_raw_report()
350 || atomic_read(&session->terminate), in hidp_set_raw_report()
1059 while (atomic_read(&session->state) <= HIDP_SESSION_IDLING) in hidp_session_start_sync()
1061 atomic_read(&session->state) > HIDP_SESSION_IDLING); in hidp_session_start_sync()
1162 atomic_read(&session->state) > HIDP_SESSION_PREPARING) in hidp_session_remove()
1197 if (atomic_read(&session->terminate)) in hidp_session_run()
/linux-4.4.14/include/misc/
Dcxl-base.h26 return (atomic_read(&cxl_use_count) != 0); in cxl_ctx_in_use()
/linux-4.4.14/tools/include/asm-generic/
Datomic-gcc.h22 static inline int atomic_read(const atomic_t *v) in atomic_read() function
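The tools/ header above provides a plain-C atomic_read() for userspace utilities such as perf. A rough, hedged equivalent built directly on the GCC/Clang __atomic builtins is shown below; the real header may differ in detail.

/* Rough userspace equivalent of the tools/ helper above, built on the
 * GCC/Clang __atomic builtins. Build with: cc -std=c11 x.c
 */
#include <stdio.h>

typedef struct {
	int counter;
} atomic_t;

static inline int atomic_read(const atomic_t *v)
{
	/* single, untorn load with no ordering beyond the compiler barrier */
	return __atomic_load_n(&v->counter, __ATOMIC_RELAXED);
}

static inline void atomic_inc(atomic_t *v)
{
	__atomic_fetch_add(&v->counter, 1, __ATOMIC_RELAXED);
}

int main(void)
{
	atomic_t refs = { .counter = 0 };

	atomic_inc(&refs);
	printf("refs=%d\n", atomic_read(&refs));	/* refs=1 */
	return 0;
}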
