rcu_data           82 kernel/rcu/tree.c static DEFINE_PER_CPU_SHARED_ALIGNED(struct rcu_data, rcu_data) = {
rcu_data          151 kernel/rcu/tree.c static void rcu_report_exp_rdp(struct rcu_data *rdp);
rcu_data          212 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          231 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          254 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          285 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          299 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          308 kernel/rcu/tree.c int rcu_dynticks_snap(struct rcu_data *rdp)
rcu_data          329 kernel/rcu/tree.c static bool rcu_dynticks_in_eqs_since(struct rcu_data *rdp, int snap)
rcu_data          345 kernel/rcu/tree.c 	struct rcu_data *rdp = &per_cpu(rcu_data, cpu);
rcu_data          371 kernel/rcu/tree.c 	raw_cpu_write(rcu_data.rcu_need_heavy_qs, false);
rcu_data          373 kernel/rcu/tree.c 				    &this_cpu_ptr(&rcu_data)->dynticks);
rcu_data          392 kernel/rcu/tree.c 	RCU_LOCKDEP_WARN(__this_cpu_read(rcu_data.dynticks_nesting) < 0,
rcu_data          394 kernel/rcu/tree.c 	RCU_LOCKDEP_WARN(__this_cpu_read(rcu_data.dynticks_nmi_nesting) <= 0,
rcu_data          398 kernel/rcu/tree.c 	if (__this_cpu_read(rcu_data.dynticks_nmi_nesting) != 1)
rcu_data          402 kernel/rcu/tree.c 	return __this_cpu_read(rcu_data.dynticks_nesting) == 0;
rcu_data          498 kernel/rcu/tree.c static void force_qs_rnp(int (*f)(struct rcu_data *rdp));
rcu_data          567 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          581 kernel/rcu/tree.c 	rdp = this_cpu_ptr(&rcu_data);
rcu_data          637 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          733 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data          737 kernel/rcu/tree.c 	rdp = this_cpu_ptr(&rcu_data);
rcu_data          803 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          923 kernel/rcu/tree.c 	smp_store_release(per_cpu_ptr(&rcu_data.rcu_urgent_qs, cpu), true);
rcu_data          943 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data          950 kernel/rcu/tree.c 	rdp = this_cpu_ptr(&rcu_data);
rcu_data          968 kernel/rcu/tree.c static void rcu_gpnum_ovf(struct rcu_node *rnp, struct rcu_data *rdp)
rcu_data          983 kernel/rcu/tree.c static int dyntick_save_progress_counter(struct rcu_data *rdp)
rcu_data         1000 kernel/rcu/tree.c static int rcu_implicit_dynticks_qs(struct rcu_data *rdp)
rcu_data         1054 kernel/rcu/tree.c 	ruqp = per_cpu_ptr(&rcu_data.rcu_urgent_qs, rdp->cpu);
rcu_data         1055 kernel/rcu/tree.c 	rnhqp = &per_cpu(rcu_data.rcu_need_heavy_qs, rdp->cpu);
rcu_data         1108 kernel/rcu/tree.c static void trace_rcu_this_gp(struct rcu_node *rnp, struct rcu_data *rdp,
rcu_data         1131 kernel/rcu/tree.c static bool rcu_start_this_gp(struct rcu_node *rnp_start, struct rcu_data *rdp,
rcu_data         1209 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         1257 kernel/rcu/tree.c static bool rcu_accelerate_cbs(struct rcu_node *rnp, struct rcu_data *rdp)
rcu_data         1299 kernel/rcu/tree.c 					struct rcu_data *rdp)
rcu_data         1328 kernel/rcu/tree.c static bool rcu_advance_cbs(struct rcu_node *rnp, struct rcu_data *rdp)
rcu_data         1352 kernel/rcu/tree.c 						  struct rcu_data *rdp)
rcu_data         1368 kernel/rcu/tree.c static bool __note_gp_changes(struct rcu_node *rnp, struct rcu_data *rdp)
rcu_data         1413 kernel/rcu/tree.c static void note_gp_changes(struct rcu_data *rdp)
rcu_data         1449 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         1547 kernel/rcu/tree.c 		rdp = this_cpu_ptr(&rcu_data);
rcu_data         1690 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         1728 kernel/rcu/tree.c 		rdp = this_cpu_ptr(&rcu_data);
rcu_data         1748 kernel/rcu/tree.c 	rdp = this_cpu_ptr(&rcu_data);
rcu_data         1947 kernel/rcu/tree.c rcu_report_qs_rdp(int cpu, struct rcu_data *rdp)
rcu_data         1997 kernel/rcu/tree.c rcu_check_quiescent_state(struct rcu_data *rdp)
rcu_data         2030 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         2095 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         2104 kernel/rcu/tree.c 	do_nocb_deferred_wakeup(per_cpu_ptr(&rcu_data, cpu));
rcu_data         2112 kernel/rcu/tree.c static void rcu_do_batch(struct rcu_data *rdp)
rcu_data         2234 kernel/rcu/tree.c 	raw_cpu_inc(rcu_data.ticks_this_gp);
rcu_data         2236 kernel/rcu/tree.c 	if (smp_load_acquire(this_cpu_ptr(&rcu_data.rcu_urgent_qs))) {
rcu_data         2242 kernel/rcu/tree.c 		__this_cpu_write(rcu_data.rcu_urgent_qs, false);
rcu_data         2258 kernel/rcu/tree.c static void force_qs_rnp(int (*f)(struct rcu_data *rdp))
rcu_data         2287 kernel/rcu/tree.c 				if (f(per_cpu_ptr(&rcu_data, cpu)))
rcu_data         2313 kernel/rcu/tree.c 	rnp = __this_cpu_read(rcu_data.mynode);
rcu_data         2343 kernel/rcu/tree.c 	struct rcu_data *rdp = raw_cpu_ptr(&rcu_data);
rcu_data         2406 kernel/rcu/tree.c 	__this_cpu_write(rcu_data.rcu_cpu_has_work, 1);
rcu_data         2407 kernel/rcu/tree.c 	t = __this_cpu_read(rcu_data.rcu_cpu_kthread_task);
rcu_data         2409 kernel/rcu/tree.c 		rcu_wake_cond(t, __this_cpu_read(rcu_data.rcu_cpu_kthread_status));
rcu_data         2428 kernel/rcu/tree.c 	per_cpu(rcu_data.rcu_cpu_kthread_status, cpu) = RCU_KTHREAD_OFFCPU;
rcu_data         2433 kernel/rcu/tree.c 	return __this_cpu_read(rcu_data.rcu_cpu_has_work);
rcu_data         2443 kernel/rcu/tree.c 	unsigned int *statusp = this_cpu_ptr(&rcu_data.rcu_cpu_kthread_status);
rcu_data         2444 kernel/rcu/tree.c 	char work, *workp = this_cpu_ptr(&rcu_data.rcu_cpu_has_work);
rcu_data         2472 kernel/rcu/tree.c 	.store			= &rcu_data.rcu_cpu_kthread_task,
rcu_data         2488 kernel/rcu/tree.c 		per_cpu(rcu_data.rcu_cpu_has_work, cpu) = 0;
rcu_data         2500 kernel/rcu/tree.c static void __call_rcu_core(struct rcu_data *rdp, struct rcu_head *head,
rcu_data         2559 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         2579 kernel/rcu/tree.c 	rdp = this_cpu_ptr(&rcu_data);
rcu_data         2792 kernel/rcu/tree.c 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         2861 kernel/rcu/tree.c 	struct rcu_data *rdp = raw_cpu_ptr(&rcu_data);
rcu_data         2889 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         2926 kernel/rcu/tree.c 		rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         2994 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3021 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3063 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3075 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         3078 kernel/rcu/tree.c 	rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3097 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         3100 kernel/rcu/tree.c 	rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3129 kernel/rcu/tree.c 	struct rcu_data *rdp;
rcu_data         3137 kernel/rcu/tree.c 	rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3173 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3178 kernel/rcu/tree.c 	rcu_report_exp_rdp(this_cpu_ptr(&rcu_data));
rcu_data         3208 kernel/rcu/tree.c 	struct rcu_data *my_rdp;
rcu_data         3210 kernel/rcu/tree.c 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         3218 kernel/rcu/tree.c 	my_rdp = this_cpu_ptr(&rcu_data);
rcu_data         3412 kernel/rcu/tree.c 		per_cpu_ptr(&rcu_data, i)->mynode = rnp;
rcu_data          223 kernel/rcu/tree.h 	struct rcu_data *nocb_next_cb_rdp;
rcu_data          227 kernel/rcu/tree.h 	struct rcu_data *nocb_gp_rdp ____cacheline_internodealigned_in_smp;
rcu_data          405 kernel/rcu/tree.h int rcu_dynticks_snap(struct rcu_data *rdp);
rcu_data          430 kernel/rcu/tree.h static void zero_cpu_stall_ticks(struct rcu_data *rdp);
rcu_data          434 kernel/rcu/tree.h static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data          436 kernel/rcu/tree.h static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data          438 kernel/rcu/tree.h static void __call_rcu_nocb_wake(struct rcu_data *rdp, bool was_empty,
rcu_data          440 kernel/rcu/tree.h static int rcu_nocb_need_deferred_wakeup(struct rcu_data *rdp);
rcu_data          441 kernel/rcu/tree.h static void do_nocb_deferred_wakeup(struct rcu_data *rdp);
rcu_data          442 kernel/rcu/tree.h static void rcu_boot_init_nocb_percpu_data(struct rcu_data *rdp);
rcu_data          445 kernel/rcu/tree.h static void show_rcu_nocb_state(struct rcu_data *rdp);
rcu_data          446 kernel/rcu/tree.h static void rcu_nocb_lock(struct rcu_data *rdp);
rcu_data          447 kernel/rcu/tree.h static void rcu_nocb_unlock(struct rcu_data *rdp);
rcu_data          448 kernel/rcu/tree.h static void rcu_nocb_unlock_irqrestore(struct rcu_data *rdp,
rcu_data          450 kernel/rcu/tree.h static void rcu_lockdep_assert_cblist_protected(struct rcu_data *rdp);
rcu_data          472 kernel/rcu/tree.h static void check_cpu_stall(struct rcu_data *rdp);
rcu_data          473 kernel/rcu/tree.h static void rcu_check_gp_start_stall(struct rcu_node *rnp, struct rcu_data *rdp,
rcu_data          251 kernel/rcu/tree_exp.h static void rcu_report_exp_rdp(struct rcu_data *rdp)
rcu_data          277 kernel/rcu/tree_exp.h 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, raw_smp_processor_id());
rcu_data          349 kernel/rcu/tree_exp.h 		struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          377 kernel/rcu/tree_exp.h 		struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          489 kernel/rcu/tree_exp.h 				struct rcu_data *rdp;
rcu_data          495 kernel/rcu/tree_exp.h 				rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          602 kernel/rcu/tree_exp.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          702 kernel/rcu/tree_exp.h 	__this_cpu_write(rcu_data.cpu_no_qs.b.exp, true);
rcu_data          704 kernel/rcu/tree_exp.h 	smp_store_release(this_cpu_ptr(&rcu_data.rcu_urgent_qs), true);
rcu_data          712 kernel/rcu/tree_exp.h 	struct rcu_data *rdp;
rcu_data          715 kernel/rcu/tree_exp.h 	rdp = this_cpu_ptr(&rcu_data);
rcu_data          718 kernel/rcu/tree_exp.h 	    __this_cpu_read(rcu_data.cpu_no_qs.b.exp))
rcu_data          721 kernel/rcu/tree_exp.h 		rcu_report_exp_rdp(this_cpu_ptr(&rcu_data));
rcu_data          732 kernel/rcu/tree_exp.h 	struct rcu_data *rdp;
rcu_data          736 kernel/rcu/tree_exp.h 	rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          741 kernel/rcu/tree_exp.h 	    __this_cpu_read(rcu_data.cpu_no_qs.b.exp)) {
rcu_data          130 kernel/rcu/tree_plugin.h static void rcu_preempt_ctxt_queue(struct rcu_node *rnp, struct rcu_data *rdp)
rcu_data          262 kernel/rcu/tree_plugin.h 	if (__this_cpu_read(rcu_data.cpu_no_qs.s)) {
rcu_data          264 kernel/rcu/tree_plugin.h 				       __this_cpu_read(rcu_data.gp_seq),
rcu_data          266 kernel/rcu/tree_plugin.h 		__this_cpu_write(rcu_data.cpu_no_qs.b.norm, false);
rcu_data          288 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          432 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp;
rcu_data          442 kernel/rcu/tree_plugin.h 	rdp = this_cpu_ptr(&rcu_data);
rcu_data          552 kernel/rcu/tree_plugin.h 	return (__this_cpu_read(rcu_data.exp_deferred_qs) ||
rcu_data          584 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp;
rcu_data          586 kernel/rcu/tree_plugin.h 	rdp = container_of(iwp, struct rcu_data, defer_qs_iw);
rcu_data          610 kernel/rcu/tree_plugin.h 		struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          706 kernel/rcu/tree_plugin.h 	    __this_cpu_read(rcu_data.core_needs_qs) &&
rcu_data          707 kernel/rcu/tree_plugin.h 	    __this_cpu_read(rcu_data.cpu_no_qs.b.norm) &&
rcu_data          749 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp;
rcu_data          771 kernel/rcu/tree_plugin.h 		rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          800 kernel/rcu/tree_plugin.h 	if (!__this_cpu_read(rcu_data.cpu_no_qs.s))
rcu_data          803 kernel/rcu/tree_plugin.h 			       __this_cpu_read(rcu_data.gp_seq), TPS("cpuqs"));
rcu_data          804 kernel/rcu/tree_plugin.h 	__this_cpu_write(rcu_data.cpu_no_qs.b.norm, false);
rcu_data          805 kernel/rcu/tree_plugin.h 	if (!__this_cpu_read(rcu_data.cpu_no_qs.b.exp))
rcu_data          807 kernel/rcu/tree_plugin.h 	__this_cpu_write(rcu_data.cpu_no_qs.b.exp, false);
rcu_data          808 kernel/rcu/tree_plugin.h 	rcu_report_exp_rdp(this_cpu_ptr(&rcu_data));
rcu_data          822 kernel/rcu/tree_plugin.h 	if (!raw_cpu_read(rcu_data.rcu_urgent_qs))
rcu_data          826 kernel/rcu/tree_plugin.h 	if (!smp_load_acquire(this_cpu_ptr(&rcu_data.rcu_urgent_qs))) {
rcu_data          830 kernel/rcu/tree_plugin.h 	this_cpu_write(rcu_data.rcu_urgent_qs, false);
rcu_data          831 kernel/rcu/tree_plugin.h 	if (unlikely(raw_cpu_read(rcu_data.rcu_need_heavy_qs))) {
rcu_data          849 kernel/rcu/tree_plugin.h 	if (!smp_load_acquire(this_cpu_ptr(&rcu_data.rcu_urgent_qs)))
rcu_data          851 kernel/rcu/tree_plugin.h 	this_cpu_write(rcu_data.rcu_urgent_qs, false);
rcu_data          852 kernel/rcu/tree_plugin.h 	if (unlikely(raw_cpu_read(rcu_data.rcu_need_heavy_qs)))
rcu_data         1096 kernel/rcu/tree_plugin.h 	return __this_cpu_read(rcu_data.rcu_cpu_kthread_task) == current;
rcu_data         1188 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         1241 kernel/rcu/tree_plugin.h 	return !rcu_segcblist_empty(&this_cpu_ptr(&rcu_data)->cblist) &&
rcu_data         1242 kernel/rcu/tree_plugin.h 	       !rcu_segcblist_is_offloaded(&this_cpu_ptr(&rcu_data)->cblist);
rcu_data         1302 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         1338 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         1345 kernel/rcu/tree_plugin.h 	    rcu_segcblist_is_offloaded(&this_cpu_ptr(&rcu_data)->cblist)) {
rcu_data         1383 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         1437 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data         1513 kernel/rcu/tree_plugin.h static void rcu_nocb_bypass_lock(struct rcu_data *rdp)
rcu_data         1536 kernel/rcu/tree_plugin.h static void rcu_nocb_wait_contended(struct rcu_data *rdp)
rcu_data         1547 kernel/rcu/tree_plugin.h static bool rcu_nocb_bypass_trylock(struct rcu_data *rdp)
rcu_data         1556 kernel/rcu/tree_plugin.h static void rcu_nocb_bypass_unlock(struct rcu_data *rdp)
rcu_data         1566 kernel/rcu/tree_plugin.h static void rcu_nocb_lock(struct rcu_data *rdp)
rcu_data         1578 kernel/rcu/tree_plugin.h static void rcu_nocb_unlock(struct rcu_data *rdp)
rcu_data         1590 kernel/rcu/tree_plugin.h static void rcu_nocb_unlock_irqrestore(struct rcu_data *rdp,
rcu_data         1602 kernel/rcu/tree_plugin.h static void rcu_lockdep_assert_cblist_protected(struct rcu_data *rdp)
rcu_data         1642 kernel/rcu/tree_plugin.h static void wake_nocb_gp(struct rcu_data *rdp, bool force,
rcu_data         1647 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp_gp = rdp->nocb_gp_rdp;
rcu_data         1673 kernel/rcu/tree_plugin.h static void wake_nocb_gp_defer(struct rcu_data *rdp, int waketype,
rcu_data         1691 kernel/rcu/tree_plugin.h static bool rcu_nocb_do_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data         1721 kernel/rcu/tree_plugin.h static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data         1735 kernel/rcu/tree_plugin.h static void rcu_nocb_try_flush_bypass(struct rcu_data *rdp, unsigned long j)
rcu_data         1762 kernel/rcu/tree_plugin.h static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data         1872 kernel/rcu/tree_plugin.h static void __call_rcu_nocb_wake(struct rcu_data *rdp, bool was_alldone,
rcu_data         1931 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = from_timer(rdp, t, nocb_bypass_timer);
rcu_data         1943 kernel/rcu/tree_plugin.h static void nocb_gp_wait(struct rcu_data *my_rdp)
rcu_data         1955 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp;
rcu_data         2077 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = arg;
rcu_data         2091 kernel/rcu/tree_plugin.h static void nocb_cb_wait(struct rcu_data *rdp)
rcu_data         2140 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = arg;
rcu_data         2152 kernel/rcu/tree_plugin.h static int rcu_nocb_need_deferred_wakeup(struct rcu_data *rdp)
rcu_data         2158 kernel/rcu/tree_plugin.h static void do_nocb_deferred_wakeup_common(struct rcu_data *rdp)
rcu_data         2177 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = from_timer(rdp, t, nocb_timer);
rcu_data         2187 kernel/rcu/tree_plugin.h static void do_nocb_deferred_wakeup(struct rcu_data *rdp)
rcu_data         2197 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp;
rcu_data         2232 kernel/rcu/tree_plugin.h 		rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         2241 kernel/rcu/tree_plugin.h static void __init rcu_boot_init_nocb_percpu_data(struct rcu_data *rdp)
rcu_data         2260 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         2261 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp_gp;
rcu_data         2329 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp;
rcu_data         2330 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp_gp = NULL;  /* Suppress misguided gcc warn. */
rcu_data         2331 kernel/rcu/tree_plugin.h 	struct rcu_data *rdp_prev = NULL;
rcu_data         2346 kernel/rcu/tree_plugin.h 		rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data         2391 kernel/rcu/tree_plugin.h static void show_rcu_nocb_gp_state(struct rcu_data *rdp)
rcu_data         2413 kernel/rcu/tree_plugin.h static void show_rcu_nocb_state(struct rcu_data *rdp)
rcu_data         2463 kernel/rcu/tree_plugin.h static void rcu_nocb_lock(struct rcu_data *rdp)
rcu_data         2468 kernel/rcu/tree_plugin.h static void rcu_nocb_unlock(struct rcu_data *rdp)
rcu_data         2473 kernel/rcu/tree_plugin.h static void rcu_nocb_unlock_irqrestore(struct rcu_data *rdp,
rcu_data         2480 kernel/rcu/tree_plugin.h static void rcu_lockdep_assert_cblist_protected(struct rcu_data *rdp)
rcu_data         2498 kernel/rcu/tree_plugin.h static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data         2504 kernel/rcu/tree_plugin.h static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
rcu_data         2510 kernel/rcu/tree_plugin.h static void __call_rcu_nocb_wake(struct rcu_data *rdp, bool was_empty,
rcu_data         2516 kernel/rcu/tree_plugin.h static void __init rcu_boot_init_nocb_percpu_data(struct rcu_data *rdp)
rcu_data         2520 kernel/rcu/tree_plugin.h static int rcu_nocb_need_deferred_wakeup(struct rcu_data *rdp)
rcu_data         2525 kernel/rcu/tree_plugin.h static void do_nocb_deferred_wakeup(struct rcu_data *rdp)
rcu_data         2537 kernel/rcu/tree_plugin.h static void show_rcu_nocb_state(struct rcu_data *rdp)
rcu_data          114 kernel/rcu/tree_stall.h static void zero_cpu_stall_ticks(struct rcu_data *rdp)
rcu_data          149 kernel/rcu/tree_stall.h 	struct rcu_data *rdp;
rcu_data          152 kernel/rcu/tree_stall.h 	rdp = container_of(iwp, struct rcu_data, rcu_iw);
rcu_data          264 kernel/rcu/tree_stall.h 	struct rcu_data *rdp = &per_cpu(rcu_data, cpu);
rcu_data          297 kernel/rcu/tree_stall.h 	struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          429 kernel/rcu/tree_stall.h 	struct rcu_data *rdp = this_cpu_ptr(&rcu_data);
rcu_data          477 kernel/rcu/tree_stall.h static void check_cpu_stall(struct rcu_data *rdp)
rcu_data          559 kernel/rcu/tree_stall.h 	struct rcu_data *rdp;
rcu_data          583 kernel/rcu/tree_stall.h 			rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          593 kernel/rcu/tree_stall.h 		rdp = per_cpu_ptr(&rcu_data, cpu);
rcu_data          605 kernel/rcu/tree_stall.h static void rcu_check_gp_start_stall(struct rcu_node *rnp, struct rcu_data *rdp,
rcu_data          666 kernel/rcu/tree_stall.h 	struct rcu_data *rdp;
rcu_data          676 kernel/rcu/tree_stall.h 		rdp = this_cpu_ptr(&rcu_data);
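Note: every entry above refers to the single per-CPU instance defined at kernel/rcu/tree.c:82, reached through the standard per-CPU accessors that recur throughout the listing (this_cpu_ptr, per_cpu_ptr, __this_cpu_read/__this_cpu_write, raw_cpu_*). The sketch below only illustrates those access idioms; it assumes a kernel build context (it is not a standalone program), the example_accessors() function is hypothetical, and the struct definition and fields belong to kernel/rcu/tree.h as cited above.

	/* Sketch only: the rcu_data access patterns seen in the listing. */
	static DEFINE_PER_CPU_SHARED_ALIGNED(struct rcu_data, rcu_data);

	static void example_accessors(int cpu)	/* hypothetical illustration */
	{
		/*
		 * Pointer to this CPU's rcu_data; only stable while the task
		 * cannot migrate (preemption or interrupts disabled), which is
		 * how the call sites above use it.
		 */
		struct rcu_data *rdp = this_cpu_ptr(&rcu_data);

		/* Pointer to a specific CPU's rcu_data, e.g. in for_each_cpu scans. */
		struct rcu_data *other = per_cpu_ptr(&rcu_data, cpu);

		/* Direct field access on the local instance, no pointer needed. */
		if (__this_cpu_read(rcu_data.cpu_no_qs.b.norm))
			__this_cpu_write(rcu_data.cpu_no_qs.b.norm, false);

		(void)rdp;
		(void)other;
	}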