swhash 8380 kernel/events/core.c find_swevent_head_rcu(struct swevent_htable *swhash, u64 type, u32 event_id)
swhash 8384 kernel/events/core.c hlist = rcu_dereference(swhash->swevent_hlist);
swhash 8393 kernel/events/core.c find_swevent_head(struct swevent_htable *swhash, struct perf_event *event)
swhash 8404 kernel/events/core.c hlist = rcu_dereference_protected(swhash->swevent_hlist,
swhash 8417 kernel/events/core.c struct swevent_htable *swhash = this_cpu_ptr(&swevent_htable);
swhash 8422 kernel/events/core.c head = find_swevent_head_rcu(swhash, type, event_id);
swhash 8438 kernel/events/core.c struct swevent_htable *swhash = this_cpu_ptr(&swevent_htable);
swhash 8440 kernel/events/core.c return get_recursion_context(swhash->recursion);
swhash 8446 kernel/events/core.c struct swevent_htable *swhash = this_cpu_ptr(&swevent_htable);
swhash 8448 kernel/events/core.c put_recursion_context(swhash->recursion, rctx);
swhash 8484 kernel/events/core.c struct swevent_htable *swhash = this_cpu_ptr(&swevent_htable);
swhash 8495 kernel/events/core.c head = find_swevent_head(swhash, event);
swhash 8522 kernel/events/core.c swevent_hlist_deref(struct swevent_htable *swhash)
swhash 8524 kernel/events/core.c return rcu_dereference_protected(swhash->swevent_hlist,
swhash 8525 kernel/events/core.c lockdep_is_held(&swhash->hlist_mutex));
swhash 8528 kernel/events/core.c static void swevent_hlist_release(struct swevent_htable *swhash)
swhash 8530 kernel/events/core.c struct swevent_hlist *hlist = swevent_hlist_deref(swhash);
swhash 8535 kernel/events/core.c RCU_INIT_POINTER(swhash->swevent_hlist, NULL);
swhash 8541 kernel/events/core.c struct swevent_htable *swhash = &per_cpu(swevent_htable, cpu);
swhash 8543 kernel/events/core.c mutex_lock(&swhash->hlist_mutex);
swhash 8545 kernel/events/core.c if (!--swhash->hlist_refcount)
swhash 8546 kernel/events/core.c swevent_hlist_release(swhash);
swhash 8548 kernel/events/core.c mutex_unlock(&swhash->hlist_mutex);
swhash 8561 kernel/events/core.c struct swevent_htable *swhash = &per_cpu(swevent_htable, cpu);
swhash 8564 kernel/events/core.c mutex_lock(&swhash->hlist_mutex);
swhash 8565 kernel/events/core.c if (!swevent_hlist_deref(swhash) &&
swhash 8574 kernel/events/core.c rcu_assign_pointer(swhash->swevent_hlist, hlist);
swhash 8576 kernel/events/core.c swhash->hlist_refcount++;
swhash 8578 kernel/events/core.c mutex_unlock(&swhash->hlist_mutex);
swhash 12109 kernel/events/core.c struct swevent_htable *swhash;
swhash 12115 kernel/events/core.c swhash = &per_cpu(swevent_htable, cpu);
swhash 12116 kernel/events/core.c mutex_init(&swhash->hlist_mutex);
swhash 12131 kernel/events/core.c struct swevent_htable *swhash = &per_cpu(swevent_htable, cpu);
swhash 12133 kernel/events/core.c mutex_lock(&swhash->hlist_mutex);
swhash 12134 kernel/events/core.c if (swhash->hlist_refcount > 0 && !swevent_hlist_deref(swhash)) {
swhash 12139 kernel/events/core.c rcu_assign_pointer(swhash->swevent_hlist, hlist);
swhash 12141 kernel/events/core.c mutex_unlock(&swhash->hlist_mutex);
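Taken together, these hits show the lifetime pattern used for the per-CPU software-event hash list: fast-path lookups dereference swhash->swevent_hlist with rcu_dereference(), while the get/put and CPU-init paths take swhash->hlist_mutex, check the pointer with rcu_dereference_protected(..., lockdep_is_held(...)), publish a new list with rcu_assign_pointer(), bump or drop hlist_refcount, and clear the pointer with RCU_INIT_POINTER() on the last put. Below is a minimal standalone sketch of that pattern, not the in-tree code: the "_example" names and the table layout are hypothetical, and error handling is reduced to the essentials.

/*
 * Sketch of a refcounted, RCU-published hash-list table, mirroring the
 * swevent_hlist get/put pattern visible in the listing above.  All
 * "_example" identifiers are illustrative, not kernel symbols.
 */
#include <linux/slab.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/rcupdate.h>

struct hlist_table_example {
	struct rcu_head		rcu;
	struct hlist_head	heads[256];
};

struct htable_owner_example {
	struct mutex			hlist_mutex;
	int				hlist_refcount;
	struct hlist_table_example __rcu *table;
};

/* Update-side dereference: only valid while hlist_mutex is held. */
static struct hlist_table_example *
table_deref_example(struct htable_owner_example *owner)
{
	return rcu_dereference_protected(owner->table,
					 lockdep_is_held(&owner->hlist_mutex));
}

/* "get": allocate and publish the table on first use, then take a reference. */
static int table_get_example(struct htable_owner_example *owner)
{
	int err = 0;

	mutex_lock(&owner->hlist_mutex);
	if (!table_deref_example(owner)) {
		struct hlist_table_example *table;

		table = kzalloc(sizeof(*table), GFP_KERNEL);
		if (!table) {
			err = -ENOMEM;
			goto unlock;
		}
		rcu_assign_pointer(owner->table, table);
	}
	owner->hlist_refcount++;
unlock:
	mutex_unlock(&owner->hlist_mutex);
	return err;
}

/* "put": drop the reference; the last put unpublishes the table and frees it after a grace period. */
static void table_put_example(struct htable_owner_example *owner)
{
	mutex_lock(&owner->hlist_mutex);
	if (!--owner->hlist_refcount) {
		struct hlist_table_example *table = table_deref_example(owner);

		RCU_INIT_POINTER(owner->table, NULL);
		kfree_rcu(table, rcu);
	}
	mutex_unlock(&owner->hlist_mutex);
}

Readers on the hot path (compare the rcu_dereference() hit at core.c line 8384) would presumably pair a plain rcu_dereference(owner->table) with an RCU read-side critical section and tolerate a NULL result, so that teardown via the last put never has to wait on the lookup path.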