Uses of rcu_state, by file:

kernel/rcu/rcu.h
   320  #define rcu_first_leaf_node() (rcu_state.level[rcu_num_lvls - 1])
   326  #define rcu_is_last_leaf_node(rnp) ((rnp) == &rcu_state.node[rcu_num_nodes - 1])
   337  srcu_for_each_node_breadth_first(&rcu_state, rnp)
   347  (rnp) < &rcu_state.node[rcu_num_nodes]; (rnp)++)
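The kernel/rcu/rcu.h hits above are the rcu_node tree-layout helpers: the first-leaf and last-leaf macros plus the breadth-first traversal whose closing clause appears at line 347. As a minimal usage sketch (the loop body and the per-node locking shown here are illustrative assumptions, not lines from the listing):

    struct rcu_node *rnp;

    /* Visit every rcu_node in the combining tree, root level first. */
    rcu_for_each_node_breadth_first(rnp) {
            raw_spin_lock_irq_rcu_node(rnp);
            /* ... examine or update per-node grace-period state ... */
            raw_spin_unlock_irq_rcu_node(rnp);
    }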

kernel/rcu/tree.c
    87  struct rcu_state rcu_state = {
    88  .level = { &rcu_state.node[0] },
    91  .barrier_mutex = __MUTEX_INITIALIZER(rcu_state.barrier_mutex),
    94  .exp_mutex = __MUTEX_INITIALIZER(rcu_state.exp_mutex),
    95  .exp_wake_mutex = __MUTEX_INITIALIZER(rcu_state.exp_wake_mutex),
    96  .ofl_lock = __RAW_SPIN_LOCK_UNLOCKED(rcu_state.ofl_lock),
   203  return rcu_seq_state(rcu_seq_current(&rcu_state.gp_seq));
   506  return READ_ONCE(rcu_state.gp_seq);
   518  return rcu_state.expedited_sequence;
   527  return &rcu_state.node[0];
   548  *flags = READ_ONCE(rcu_state.gp_flags);
   549  *gp_seq = rcu_seq_current(&rcu_state.gp_seq);
   987  trace_rcu_fqs(rcu_state.name, rdp->gp_seq, rdp->cpu, TPS("dti"));
  1016  trace_rcu_fqs(rcu_state.name, rdp->gp_seq, rdp->cpu, TPS("dti"));
  1023  time_after(jiffies, rcu_state.gp_start + HZ)) {
  1057  (time_after(jiffies, rcu_state.gp_start + jtsq * 2) ||
  1058  time_after(jiffies, rcu_state.jiffies_resched))) {
  1062  } else if (time_after(jiffies, rcu_state.gp_start + jtsq)) {
  1088  if (time_after(jiffies, rcu_state.jiffies_resched)) {
  1111  trace_rcu_future_grace_period(rcu_state.name, rnp->gp_seq, gp_seq_req,
  1183  WRITE_ONCE(rcu_state.gp_flags, rcu_state.gp_flags | RCU_GP_FLAG_INIT);
  1184  rcu_state.gp_req_activity = jiffies;
  1185  if (!rcu_state.gp_kthread) {
  1189  trace_rcu_grace_period(rcu_state.name, READ_ONCE(rcu_state.gp_seq), TPS("newreq"));
  1235  if ((current == rcu_state.gp_kthread &&
  1237  !READ_ONCE(rcu_state.gp_flags) ||
  1238  !rcu_state.gp_kthread)
  1240  WRITE_ONCE(rcu_state.gp_wake_time, jiffies);
  1241  WRITE_ONCE(rcu_state.gp_wake_seq, READ_ONCE(rcu_state.gp_seq));
  1242  swake_up_one(&rcu_state.gp_wq);
  1279  gp_seq_req = rcu_seq_snap(&rcu_state.gp_seq);
  1285  trace_rcu_grace_period(rcu_state.name, rdp->gp_seq, TPS("AccWaitCB"));
  1287  trace_rcu_grace_period(rcu_state.name, rdp->gp_seq, TPS("AccReadyCB"));
  1305  c = rcu_seq_snap(&rcu_state.gp_seq);
  1385  trace_rcu_grace_period(rcu_state.name, rdp->gp_seq, TPS("cpuend"));
  1399  trace_rcu_grace_period(rcu_state.name, rnp->gp_seq, TPS("cpustart"));
  1436  !(rcu_seq_ctr(rcu_state.gp_seq) %
  1452  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1454  if (!READ_ONCE(rcu_state.gp_flags)) {
  1459  WRITE_ONCE(rcu_state.gp_flags, 0); /* Clear all flags: New GP. */
  1473  rcu_seq_start(&rcu_state.gp_seq);
  1474  trace_rcu_grace_period(rcu_state.name, rcu_state.gp_seq, TPS("start"));
  1483  rcu_state.gp_state = RCU_GP_ONOFF;
  1485  raw_spin_lock(&rcu_state.ofl_lock);
  1491  raw_spin_unlock(&rcu_state.ofl_lock);
  1527  raw_spin_unlock(&rcu_state.ofl_lock);
  1543  rcu_state.gp_state = RCU_GP_INIT;
  1550  WRITE_ONCE(rnp->gp_seq, rcu_state.gp_seq);
  1554  trace_rcu_grace_period_init(rcu_state.name, rnp->gp_seq,
  1565  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1580  *gfp = READ_ONCE(rcu_state.gp_flags);
  1598  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1599  rcu_state.n_force_qs++;
  1608  if (READ_ONCE(rcu_state.gp_flags) & RCU_GP_FLAG_FQS) {
  1610  WRITE_ONCE(rcu_state.gp_flags,
  1611  READ_ONCE(rcu_state.gp_flags) & ~RCU_GP_FLAG_FQS);
  1632  rcu_state.jiffies_force_qs = jiffies + j;
  1633  WRITE_ONCE(rcu_state.jiffies_kick_kthreads,
  1636  trace_rcu_grace_period(rcu_state.name,
  1637  READ_ONCE(rcu_state.gp_seq),
  1639  rcu_state.gp_state = RCU_GP_WAIT_FQS;
  1641  rcu_state.gp_wq, rcu_gp_fqs_check_wake(&gf), j);
  1642  rcu_state.gp_state = RCU_GP_DOING_FQS;
  1649  if (ULONG_CMP_GE(jiffies, rcu_state.jiffies_force_qs) ||
  1651  trace_rcu_grace_period(rcu_state.name,
  1652  READ_ONCE(rcu_state.gp_seq),
  1656  trace_rcu_grace_period(rcu_state.name,
  1657  READ_ONCE(rcu_state.gp_seq),
  1660  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1666  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1668  trace_rcu_grace_period(rcu_state.name,
  1669  READ_ONCE(rcu_state.gp_seq),
  1673  if (time_after(jiffies, rcu_state.jiffies_force_qs))
  1676  j = rcu_state.jiffies_force_qs - j;
  1694  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1696  rcu_state.gp_end = jiffies;
  1697  gp_duration = rcu_state.gp_end - rcu_state.gp_start;
  1698  if (gp_duration > rcu_state.gp_max)
  1699  rcu_state.gp_max = gp_duration;
  1720  new_gp_seq = rcu_state.gp_seq;
  1737  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1744  trace_rcu_grace_period(rcu_state.name, rcu_state.gp_seq, TPS("end"));
  1745  rcu_seq_end(&rcu_state.gp_seq);
  1746  rcu_state.gp_state = RCU_GP_IDLE;
  1758  WRITE_ONCE(rcu_state.gp_flags, RCU_GP_FLAG_INIT);
  1759  rcu_state.gp_req_activity = jiffies;
  1760  trace_rcu_grace_period(rcu_state.name,
  1761  READ_ONCE(rcu_state.gp_seq),
  1764  WRITE_ONCE(rcu_state.gp_flags,
  1765  rcu_state.gp_flags & RCU_GP_FLAG_INIT);
  1780  trace_rcu_grace_period(rcu_state.name,
  1781  READ_ONCE(rcu_state.gp_seq),
  1783  rcu_state.gp_state = RCU_GP_WAIT_GPS;
  1784  swait_event_idle_exclusive(rcu_state.gp_wq,
  1785  READ_ONCE(rcu_state.gp_flags) &
  1787  rcu_state.gp_state = RCU_GP_DONE_GPS;
  1792  WRITE_ONCE(rcu_state.gp_activity, jiffies);
  1794  trace_rcu_grace_period(rcu_state.name,
  1795  READ_ONCE(rcu_state.gp_seq),
  1803  rcu_state.gp_state = RCU_GP_CLEANUP;
  1805  rcu_state.gp_state = RCU_GP_CLEANED;
  1823  WRITE_ONCE(rcu_state.gp_flags,
  1824  READ_ONCE(rcu_state.gp_flags) | RCU_GP_FLAG_FQS);
  1867  trace_rcu_quiescent_state_report(rcu_state.name, rnp->gp_seq,
  2037  trace_rcu_grace_period(rcu_state.name, rnp->gp_seq,
  2124  trace_rcu_batch_start(rcu_state.name,
  2127  trace_rcu_batch_end(rcu_state.name, 0,
  2146  trace_rcu_batch_start(rcu_state.name,
  2158  if (__rcu_reclaim(rcu_state.name, rhp))
  2188  trace_rcu_batch_end(rcu_state.name, count, !!rcl.head, need_resched(),
  2204  rdp->n_force_qs_snap = rcu_state.n_force_qs;
  2315  ret = (READ_ONCE(rcu_state.gp_flags) & RCU_GP_FLAG_FQS) ||
  2328  if (READ_ONCE(rcu_state.gp_flags) & RCU_GP_FLAG_FQS) {
  2332  WRITE_ONCE(rcu_state.gp_flags,
  2333  READ_ONCE(rcu_state.gp_flags) | RCU_GP_FLAG_FQS);
  2533  if (rcu_state.n_force_qs == rdp->n_force_qs_snap &&
  2536  rdp->n_force_qs_snap = rcu_state.n_force_qs;
  2596  trace_rcu_kfree_callback(rcu_state.name, head,
  2601  trace_rcu_callback(rcu_state.name, head,
  2756  return rcu_seq_snap(&rcu_state.gp_seq);
  2776  if (!rcu_seq_done(&rcu_state.gp_seq, oldstate))
  2837  trace_rcu_barrier(rcu_state.name, s, cpu,
  2838  atomic_read(&rcu_state.barrier_cpu_count), done);
  2847  if (atomic_dec_and_test(&rcu_state.barrier_cpu_count)) {
  2849  rcu_state.barrier_sequence);
  2850  complete(&rcu_state.barrier_completion);
  2852  rcu_barrier_trace(TPS("CB"), -1, rcu_state.barrier_sequence);
  2863  rcu_barrier_trace(TPS("IRQ"), -1, rcu_state.barrier_sequence);
  2869  atomic_inc(&rcu_state.barrier_cpu_count);
  2873  rcu_state.barrier_sequence);
  2890  unsigned long s = rcu_seq_snap(&rcu_state.barrier_sequence);
  2895  mutex_lock(&rcu_state.barrier_mutex);
  2898  if (rcu_seq_done(&rcu_state.barrier_sequence, s)) {
  2900  rcu_state.barrier_sequence);
  2902  mutex_unlock(&rcu_state.barrier_mutex);
  2907  rcu_seq_start(&rcu_state.barrier_sequence);
  2908  rcu_barrier_trace(TPS("Inc1"), -1, rcu_state.barrier_sequence);
  2916  init_completion(&rcu_state.barrier_completion);
  2917  atomic_set(&rcu_state.barrier_cpu_count, 1);
  2932  rcu_state.barrier_sequence);
  2936  rcu_state.barrier_sequence);
  2945  if (atomic_dec_and_test(&rcu_state.barrier_cpu_count))
  2946  complete(&rcu_state.barrier_completion);
  2949  wait_for_completion(&rcu_state.barrier_completion);
  2952  rcu_barrier_trace(TPS("Inc2"), -1, rcu_state.barrier_sequence);
  2953  rcu_seq_end(&rcu_state.barrier_sequence);
  2956  mutex_unlock(&rcu_state.barrier_mutex);
  3000  rdp->rcu_ofl_gp_seq = rcu_state.gp_seq;
  3002  rdp->rcu_onl_gp_seq = rcu_state.gp_seq;
  3027  rdp->n_force_qs_snap = rcu_state.n_force_qs;
  3050  trace_rcu_grace_period(rcu_state.name, rdp->gp_seq, TPS("cpuonl"));
  3147  smp_store_release(&rcu_state.ncpus, rcu_state.ncpus + nbits); /* ^^^ */
  3149  rdp->rcu_onl_gp_seq = READ_ONCE(rcu_state.gp_seq);
  3150  rdp->rcu_onl_gp_flags = READ_ONCE(rcu_state.gp_flags);
  3184  raw_spin_lock(&rcu_state.ofl_lock);
  3186  rdp->rcu_ofl_gp_seq = READ_ONCE(rcu_state.gp_seq);
  3187  rdp->rcu_ofl_gp_flags = READ_ONCE(rcu_state.gp_flags);
  3195  raw_spin_unlock(&rcu_state.ofl_lock);
  3298  t = kthread_create(rcu_gp_kthread, NULL, "%s", rcu_state.name);
  3307  rcu_state.gp_kthread = t;
  3360  rcu_state.level[i] =
  3361  rcu_state.level[i - 1] + num_rcu_lvl[i - 1];
  3368  rnp = rcu_state.level[i];
  3376  rnp->gp_seq = rcu_state.gp_seq;
  3377  rnp->gp_seq_needed = rcu_state.gp_seq;
  3378  rnp->completedqs = rcu_state.gp_seq;
  3392  rnp->parent = rcu_state.level[i - 1] +
  3406  init_swait_queue_head(&rcu_state.gp_wq);
  3407  init_swait_queue_head(&rcu_state.expedited_wq);
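Two of the kernel/rcu/tree.c references above, lines 2756 and 2776, are the grace-period snapshot and poll pair: rcu_seq_snap() and rcu_seq_done() applied to rcu_state.gp_seq, which is what get_state_synchronize_rcu() and cond_synchronize_rcu() are built on in this file. A caller-side sketch of that idiom follows; the update-side code around it is hypothetical:

    unsigned long cookie;

    /* Snapshot the grace-period sequence (the rcu_seq_snap() at tree.c:2756). */
    cookie = get_state_synchronize_rcu();

    /* ... unlink an element from an RCU-protected structure ... */

    /*
     * Wait only if the snapshotted grace period has not already completed,
     * i.e. only if the rcu_seq_done() test at tree.c:2776 fails.
     */
    cond_synchronize_rcu(cookie);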

kernel/rcu/tree_exp.h
    20  rcu_seq_start(&rcu_state.expedited_sequence);
    29  return rcu_seq_endval(&rcu_state.expedited_sequence);
    37  rcu_seq_end(&rcu_state.expedited_sequence);
    49  s = rcu_seq_snap(&rcu_state.expedited_sequence);
    50  trace_rcu_exp_grace_period(rcu_state.name, s, TPS("snap"));
    61  return rcu_seq_done(&rcu_state.expedited_sequence, s);
    77  int ncpus = smp_load_acquire(&rcu_state.ncpus); /* Order vs. locking. */
    82  if (likely(ncpus == rcu_state.ncpus_snap))
    84  rcu_state.ncpus_snap = ncpus;
   205  swake_up_one(&rcu_state.expedited_wq);
   261  trace_rcu_exp_grace_period(rcu_state.name, s, TPS("done"));
   285  mutex_trylock(&rcu_state.exp_mutex))
   305  trace_rcu_exp_funnel_lock(rcu_state.name, rnp->level,
   314  trace_rcu_exp_funnel_lock(rcu_state.name, rnp->level,
   317  mutex_lock(&rcu_state.exp_mutex);
   320  mutex_unlock(&rcu_state.exp_mutex);
   324  trace_rcu_exp_grace_period(rcu_state.name, s, TPS("start"));
   400  trace_rcu_exp_grace_period(rcu_state.name, rcu_exp_gp_seq_endval(), TPS("selectofl"));
   424  trace_rcu_exp_grace_period(rcu_state.name, rcu_exp_gp_seq_endval(), TPS("reset"));
   426  trace_rcu_exp_grace_period(rcu_state.name, rcu_exp_gp_seq_endval(), TPS("select"));
   468  trace_rcu_exp_grace_period(rcu_state.name, rcu_exp_gp_seq_endval(), TPS("startwait"));
   474  rcu_state.expedited_wq,
   484  rcu_state.name);
   503  jiffies - jiffies_start, rcu_state.expedited_sequence,
   547  mutex_lock(&rcu_state.exp_wake_mutex);
   549  trace_rcu_exp_grace_period(rcu_state.name, s, TPS("end"));
   562  trace_rcu_exp_grace_period(rcu_state.name, s, TPS("endwake"));
   563  mutex_unlock(&rcu_state.exp_wake_mutex);
   836  mutex_unlock(&rcu_state.exp_mutex);
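The kernel/rcu/tree_exp.h references show the same sequence-counter idiom applied to rcu_state.expedited_sequence, combined with exp_mutex so that concurrent requesters can share a single expedited grace period: snapshot the sequence (line 49), return early if that grace period has already completed (line 61), otherwise acquire the mutex (lines 285 and 317) and re-check before doing the work. Below is a generic sketch of that pattern under assumed names; struct shared_work and request_work() are made up, and the real code funnels up the rcu_node tree rather than taking one mutex directly:

    struct shared_work {
            unsigned long seq;      /* rcu_seq-style sequence counter */
            struct mutex lock;
    };

    static void request_work(struct shared_work *sw)
    {
            unsigned long s = rcu_seq_snap(&sw->seq);   /* sequence we need done */

            if (rcu_seq_done(&sw->seq, s))
                    return;                 /* someone else already did it */
            mutex_lock(&sw->lock);
            if (rcu_seq_done(&sw->seq, s)) {
                    mutex_unlock(&sw->lock);
                    return;                 /* finished while we waited for the lock */
            }
            rcu_seq_start(&sw->seq);
            /* ... do the work, e.g. drive one expedited grace period ... */
            rcu_seq_end(&sw->seq);
            mutex_unlock(&sw->lock);
    }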

kernel/rcu/tree_plugin.h
   310  trace_rcu_preempt_task(rcu_state.name,
   709  time_after(jiffies, rcu_state.gp_start + HZ))
  1127  rcu_state.boost = 1;
  1651  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1662  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("DoWake"));
  1680  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, reason);
  1806  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1821  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1844  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("FirstBQ"));
  1854  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1858  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1884  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1896  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  1921  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("WakeNot"));
  1933  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("Timer"));
  1965  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("Check"));
  1979  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  2007  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu,
  2039  trace_rcu_nocb_wake(rcu_state.name, cpu, TPS("Poll"));
  2043  trace_rcu_nocb_wake(rcu_state.name, cpu, TPS("Sleep"));
  2046  trace_rcu_nocb_wake(rcu_state.name, cpu, TPS("EndSleep"));
  2119  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("CBSleep"));
  2131  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("WokeEmpty"));
  2171  trace_rcu_nocb_wake(rcu_state.name, rdp->cpu, TPS("DeferredWake"));
  2283  "rcuo%c/%d", rcu_state.abbr, cpu);
  2557  ULONG_CMP_LT(jiffies, READ_ONCE(rcu_state.gp_start) + HZ)))

kernel/rcu/tree_stall.h
    92  WRITE_ONCE(rcu_state.jiffies_stall, jiffies + ULONG_MAX / 2);
   105  rcu_state.gp_start = j;
   108  smp_store_release(&rcu_state.jiffies_stall, j + j1); /* ^^^ */
   109  rcu_state.jiffies_resched = j + j1 / 2;
   110  rcu_state.n_force_qs_gpstart = READ_ONCE(rcu_state.n_force_qs);
   131  j = READ_ONCE(rcu_state.jiffies_kick_kthreads);
   132  if (time_after(jiffies, j) && rcu_state.gp_kthread &&
   133  (rcu_gp_in_progress() || READ_ONCE(rcu_state.gp_flags))) {
   135  rcu_state.name);
   137  wake_up_process(rcu_state.gp_kthread);
   138  WRITE_ONCE(rcu_state.jiffies_kick_kthreads, j + HZ);
   307  ticks_value = rcu_seq_ctr(rcu_state.gp_seq - rdp->gp_seq);
   328  READ_ONCE(rcu_state.n_force_qs) - rcu_state.n_force_qs_gpstart,
   335  struct task_struct *gpk = rcu_state.gp_kthread;
   338  j = jiffies - READ_ONCE(rcu_state.gp_activity);
   341  rcu_state.name, j,
   342  (long)rcu_seq_current(&rcu_state.gp_seq),
   343  READ_ONCE(rcu_state.gp_flags),
   344  gp_state_getname(rcu_state.gp_state), rcu_state.gp_state,
   374  pr_err("INFO: %s detected stalls on CPUs/tasks:\n", rcu_state.name);
   391  smp_processor_id(), (long)(jiffies - rcu_state.gp_start),
   392  (long)rcu_seq_current(&rcu_state.gp_seq), totqlen);
   400  if (rcu_seq_current(&rcu_state.gp_seq) != gp_seq) {
   404  gpa = READ_ONCE(rcu_state.gp_activity);
   406  rcu_state.name, j - gpa, j, gpa,
   414  if (ULONG_CMP_GE(jiffies, READ_ONCE(rcu_state.jiffies_stall)))
   415  WRITE_ONCE(rcu_state.jiffies_stall,
   443  pr_err("INFO: %s self-detected stall on CPU\n", rcu_state.name);
   450  jiffies - rcu_state.gp_start,
   451  (long)rcu_seq_current(&rcu_state.gp_seq), totqlen);
   459  if (ULONG_CMP_GE(jiffies, READ_ONCE(rcu_state.jiffies_stall)))
   460  WRITE_ONCE(rcu_state.jiffies_stall,
   511  gs1 = READ_ONCE(rcu_state.gp_seq);
   513  js = READ_ONCE(rcu_state.jiffies_stall);
   515  gps = READ_ONCE(rcu_state.gp_start);
   517  gs2 = READ_ONCE(rcu_state.gp_seq);
   526  cmpxchg(&rcu_state.jiffies_stall, js, jn) == js) {
   535  cmpxchg(&rcu_state.jiffies_stall, js, jn) == js) {
   563  ja = j - READ_ONCE(rcu_state.gp_activity);
   564  jr = j - READ_ONCE(rcu_state.gp_req_activity);
   565  jw = j - READ_ONCE(rcu_state.gp_wake_time);
   567  rcu_state.name, gp_state_getname(rcu_state.gp_state),
   568  rcu_state.gp_state,
   569  rcu_state.gp_kthread ? rcu_state.gp_kthread->state : 0x1ffffL,
   570  ja, jr, jw, (long)READ_ONCE(rcu_state.gp_wake_seq),
   571  (long)READ_ONCE(rcu_state.gp_seq),
   573  READ_ONCE(rcu_state.gp_flags));
   575  if (ULONG_CMP_GE(rcu_state.gp_seq, rnp->gp_seq_needed))
   585  ULONG_CMP_GE(rcu_state.gp_seq,
   617  if (time_before(j, READ_ONCE(rcu_state.gp_req_activity) + gpssdelay) ||
   618  time_before(j, READ_ONCE(rcu_state.gp_activity) + gpssdelay) ||
   626  time_before(j, READ_ONCE(rcu_state.gp_req_activity) + gpssdelay) ||
   627  time_before(j, READ_ONCE(rcu_state.gp_activity) + gpssdelay) ||
   639  time_before(j, rcu_state.gp_req_activity + gpssdelay) ||
   640  time_before(j, rcu_state.gp_activity + gpssdelay) ||
   670  __func__, jiffies - rcu_state.gp_start);
   674  __func__, jiffies - rcu_state.gp_end);
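Finally, the kernel/rcu/tree_stall.h references outline the stall detector: rcu_state.gp_start and rcu_state.jiffies_stall are (re)armed when a grace period starts (lines 105 and 108), and the checker compares jiffies against that deadline and advances it with cmpxchg() (lines 526 and 535) so that only one CPU prints a report for a given stall. A simplified sketch of that report-once test; the jn value here is illustrative rather than the real timeout formula:

    unsigned long js = READ_ONCE(rcu_state.jiffies_stall);  /* published deadline */
    unsigned long jn = js + HZ;                              /* assumed next deadline */

    if (ULONG_CMP_GE(jiffies, js) &&
        cmpxchg(&rcu_state.jiffies_stall, js, jn) == js) {
            /*
             * Only the CPU that wins the cmpxchg() reaches this point, so
             * the stall is reported once and the deadline is already moved.
             */
            /* ... print the RCU CPU stall warning ... */
    }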