Lines matching refs:kp — references to the identifier kp in kernel/kprobes.c. Each hit shows the source line number, the matched code, and the enclosing function; "argument" / "local" marks the hits where kp is declared rather than merely used.
288 static inline void set_kprobe_instance(struct kprobe *kp) in set_kprobe_instance() argument
290 __this_cpu_write(kprobe_instance, kp); in set_kprobe_instance()
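The two hits above are the whole of set_kprobe_instance(). For context, a sketch of the surrounding bookkeeping as it appears in kernel/kprobes.c: a per-CPU pointer records which kprobe's handler is currently running on this CPU, so the fault and re-entry paths can tell whose handler trapped. The matching reset helper is not part of this listing but sits next to it in the source:

	#include <linux/kprobes.h>
	#include <linux/percpu.h>

	/* The kprobe whose handler is currently running on this CPU. */
	static DEFINE_PER_CPU(struct kprobe *, kprobe_instance);

	static inline void set_kprobe_instance(struct kprobe *kp)
	{
		__this_cpu_write(kprobe_instance, kp);
	}

	/* Matching teardown: no handler is active on this CPU anymore. */
	static inline void reset_kprobe_instance(void)
	{
		__this_cpu_write(kprobe_instance, NULL);
	}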
353 struct kprobe *kp; in opt_pre_handler() local
355 list_for_each_entry_rcu(kp, &p->list, list) { in opt_pre_handler()
356 if (kp->pre_handler && likely(!kprobe_disabled(kp))) { in opt_pre_handler()
357 set_kprobe_instance(kp); in opt_pre_handler()
358 kp->pre_handler(kp, regs); in opt_pre_handler()
370 op = container_of(p, struct optimized_kprobe, kp); in free_aggr_kprobe()
382 op = container_of(p, struct optimized_kprobe, kp); in kprobe_optready()
398 op = container_of(p, struct optimized_kprobe, kp); in kprobe_disarmed()
409 op = container_of(p, struct optimized_kprobe, kp); in kprobe_queued()
431 op = container_of(p, struct optimized_kprobe, kp); in get_optimized_kprobe()
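Every container_of(p, struct optimized_kprobe, kp) hit above (lines 370-431, and again further down) is the same recovery idiom: struct optimized_kprobe embeds a struct kprobe as its kp member, so code that is handed the inner kprobe can step back out to the enclosing structure. A minimal sketch; the wrapper name is hypothetical, the open-coded container_of() is what the kernel actually uses:

	#include <linux/kernel.h>	/* container_of() */
	#include <linux/kprobes.h>	/* struct kprobe, struct optimized_kprobe */

	/*
	 * Hypothetical wrapper for the idiom at lines 370-431: given the
	 * embedded kp member, compute the address of the containing
	 * optimized_kprobe (CONFIG_OPTPROBES provides the type).
	 */
	static struct optimized_kprobe *to_optimized_kprobe(struct kprobe *p)
	{
		return container_of(p, struct optimized_kprobe, kp);
	}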
495 if (kprobe_disabled(&op->kp)) in do_unoptimize_kprobes()
496 arch_disarm_kprobe(&op->kp); in do_unoptimize_kprobes()
497 if (kprobe_unused(&op->kp)) { in do_unoptimize_kprobes()
503 hlist_del_rcu(&op->kp.hlist); in do_unoptimize_kprobes()
517 BUG_ON(!kprobe_unused(&op->kp)); in do_free_cleaned_kprobes()
519 free_aggr_kprobe(&op->kp); in do_free_cleaned_kprobes()
598 op = container_of(p, struct optimized_kprobe, kp); in optimize_kprobe()
605 if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) in optimize_kprobe()
607 op->kp.flags |= KPROBE_FLAG_OPTIMIZED; in optimize_kprobe()
624 if (kprobe_disabled(&op->kp)) in force_unoptimize_kprobe()
625 arch_disarm_kprobe(&op->kp); in force_unoptimize_kprobe()
636 op = container_of(p, struct optimized_kprobe, kp); in unoptimize_kprobe()
651 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in unoptimize_kprobe()
677 op = container_of(ap, struct optimized_kprobe, kp); in reuse_unused_kprobe()
693 op = container_of(p, struct optimized_kprobe, kp); in kill_optimized_kprobe()
697 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in kill_optimized_kprobe()
707 hlist_del_rcu(&op->kp.hlist); in kill_optimized_kprobe()
719 op = container_of(p, struct optimized_kprobe, kp); in prepare_optimized_kprobe()
733 op->kp.addr = p->addr; in alloc_aggr_kprobe()
736 return &op->kp; in alloc_aggr_kprobe()
762 op = container_of(ap, struct optimized_kprobe, kp); in try_to_optimize_kprobe()
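The predicates sprinkled through these hits — kprobe_disabled(), kprobe_ftrace() — and the KPROBE_FLAG_OPTIMIZED updates at lines 607, 651 and 697 all operate on the same kprobe->flags word. A sketch of the flag helpers as found in include/linux/kprobes.h (the exact set varies by kernel version):

	/* Bit tests on kprobe->flags, per include/linux/kprobes.h. */
	static inline int kprobe_gone(struct kprobe *p)
	{
		return p->flags & KPROBE_FLAG_GONE;
	}

	/* Disabled means explicitly disabled *or* gone. */
	static inline int kprobe_disabled(struct kprobe *p)
	{
		return p->flags & (KPROBE_FLAG_DISABLED | KPROBE_FLAG_GONE);
	}

	static inline int kprobe_optimized(struct kprobe *p)
	{
		return p->flags & KPROBE_FLAG_OPTIMIZED;
	}

	static inline int kprobe_ftrace(struct kprobe *p)
	{
		return p->flags & KPROBE_FLAG_FTRACE;
	}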
968 static void arm_kprobe(struct kprobe *kp) in arm_kprobe() argument
970 if (unlikely(kprobe_ftrace(kp))) { in arm_kprobe()
971 arm_kprobe_ftrace(kp); in arm_kprobe()
980 __arm_kprobe(kp); in arm_kprobe()
985 static void disarm_kprobe(struct kprobe *kp, bool reopt) in disarm_kprobe() argument
987 if (unlikely(kprobe_ftrace(kp))) { in disarm_kprobe()
988 disarm_kprobe_ftrace(kp); in disarm_kprobe()
993 __disarm_kprobe(kp, reopt); in disarm_kprobe()
1003 struct kprobe *kp; in aggr_pre_handler() local
1005 list_for_each_entry_rcu(kp, &p->list, list) { in aggr_pre_handler()
1006 if (kp->pre_handler && likely(!kprobe_disabled(kp))) { in aggr_pre_handler()
1007 set_kprobe_instance(kp); in aggr_pre_handler()
1008 if (kp->pre_handler(kp, regs)) in aggr_pre_handler()
1020 struct kprobe *kp; in aggr_post_handler() local
1022 list_for_each_entry_rcu(kp, &p->list, list) { in aggr_post_handler()
1023 if (kp->post_handler && likely(!kprobe_disabled(kp))) { in aggr_post_handler()
1024 set_kprobe_instance(kp); in aggr_post_handler()
1025 kp->post_handler(kp, regs, flags); in aggr_post_handler()
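aggr_pre_handler() and aggr_post_handler() fan a single hit out to every kprobe chained on the aggregate probe's ->list. Note the asymmetry with opt_pre_handler() above: line 1008 honors a nonzero pre-handler return and stops, while line 358 discards it, because a jump-optimized probe cannot fall back to single-stepping. From the user side, the handlers being iterated come from registrations like the following minimal module sketch ("do_fork" is only an assumed example target; the handler signatures match kernels of this listing's vintage):

	#include <linux/module.h>
	#include <linux/kprobes.h>

	static int demo_pre(struct kprobe *p, struct pt_regs *regs)
	{
		pr_info("kprobe pre: %s hit\n", p->symbol_name);
		return 0;	/* 0: let the probed instruction execute */
	}

	static void demo_post(struct kprobe *p, struct pt_regs *regs,
			      unsigned long flags)
	{
		pr_info("kprobe post: %s done\n", p->symbol_name);
	}

	/* "do_fork" is an assumed example; pick any probeable symbol. */
	static struct kprobe demo_kp = {
		.symbol_name	= "do_fork",
		.pre_handler	= demo_pre,
		.post_handler	= demo_post,
	};

	static int __init demo_init(void)
	{
		return register_kprobe(&demo_kp);
	}

	static void __exit demo_exit(void)
	{
		unregister_kprobe(&demo_kp);
	}

	module_init(demo_init);
	module_exit(demo_exit);
	MODULE_LICENSE("GPL");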
1066 struct kprobe *kp; in kprobes_inc_nmissed_count() local
1070 list_for_each_entry_rcu(kp, &p->list, list) in kprobes_inc_nmissed_count()
1071 kp->nmissed++; in kprobes_inc_nmissed_count()
1545 struct kprobe *kp; in aggr_kprobe_disabled() local
1547 list_for_each_entry_rcu(kp, &ap->list, list) in aggr_kprobe_disabled()
1548 if (!kprobe_disabled(kp)) in aggr_kprobe_disabled()
1733 jp->kp.pre_handler = setjmp_pre_handler; in register_jprobes()
1734 jp->kp.break_handler = longjmp_break_handler; in register_jprobes()
1735 ret = register_kprobe(&jp->kp); in register_jprobes()
1769 if (__unregister_kprobe_top(&jps[i]->kp) < 0) in unregister_jprobes()
1770 jps[i]->kp.addr = NULL; in unregister_jprobes()
1775 if (jps[i]->kp.addr) in unregister_jprobes()
1776 __unregister_kprobe_bottom(&jps[i]->kp); in unregister_jprobes()
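Lines 1733-1735 show what register_jprobes() does with the embedded kp: it forces the pre/break handlers to the setjmp/longjmp pair, then registers the inner kprobe. For completeness, a user-side sketch for a kernel of this vintage (jprobes were removed upstream in Linux 4.15); the do_fork prototype here is assumed for illustration, and the handler must mirror the probed function's prototype and exit via jprobe_return():

	#include <linux/module.h>
	#include <linux/kprobes.h>

	/* Same prototype as the (assumed) probed function do_fork. */
	static long demo_jdo_fork(unsigned long clone_flags,
				  unsigned long stack_start,
				  unsigned long stack_size,
				  int __user *parent_tidptr,
				  int __user *child_tidptr)
	{
		pr_info("jprobe: clone_flags = 0x%lx\n", clone_flags);
		jprobe_return();	/* mandatory: unwind back to the kernel */
		return 0;		/* never reached */
	}

	static struct jprobe demo_jp = {
		.entry = demo_jdo_fork,
		.kp = {
			.symbol_name = "do_fork",
		},
	};

	static int __init demo_init(void)
	{
		return register_jprobe(&demo_jp);
	}

	static void __exit demo_exit(void)
	{
		unregister_jprobe(&demo_jp);
	}

	module_init(demo_init);
	module_exit(demo_exit);
	MODULE_LICENSE("GPL");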
1788 struct kretprobe *rp = container_of(p, struct kretprobe, kp); in pre_handler_kretprobe()
1845 addr = kprobe_addr(&rp->kp); in register_kretprobe()
1855 rp->kp.pre_handler = pre_handler_kretprobe; in register_kretprobe()
1856 rp->kp.post_handler = NULL; in register_kretprobe()
1857 rp->kp.fault_handler = NULL; in register_kretprobe()
1858 rp->kp.break_handler = NULL; in register_kretprobe()
1883 ret = register_kprobe(&rp->kp); in register_kretprobe()
1922 if (__unregister_kprobe_top(&rps[i]->kp) < 0) in unregister_kretprobes()
1923 rps[i]->kp.addr = NULL; in unregister_kretprobes()
1928 if (rps[i]->kp.addr) { in unregister_kretprobes()
1929 __unregister_kprobe_bottom(&rps[i]->kp); in unregister_kretprobes()
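register_kretprobe() (lines 1845-1883) plants the real probe at function entry through the embedded rp->kp, with pre_handler_kretprobe as its pre-handler; the user-supplied handler then runs when the probed function returns. A minimal sketch, again with an assumed target symbol:

	#include <linux/module.h>
	#include <linux/kprobes.h>
	#include <linux/ptrace.h>

	/* Runs on return from the probed function; regs hold the exit state. */
	static int demo_ret_handler(struct kretprobe_instance *ri,
				    struct pt_regs *regs)
	{
		pr_info("kretprobe: %s returned 0x%lx\n",
			ri->rp->kp.symbol_name, regs_return_value(regs));
		return 0;
	}

	static struct kretprobe demo_krp = {
		.handler	= demo_ret_handler,
		.maxactive	= 20,		/* concurrent in-flight calls tracked */
		.kp.symbol_name	= "do_fork",	/* assumed example target */
	};

	static int __init demo_init(void)
	{
		return register_kretprobe(&demo_krp);
	}

	static void __exit demo_exit(void)
	{
		unregister_kretprobe(&demo_krp);
		/* nmissed counts returns dropped once maxactive was exceeded */
		pr_info("kretprobe: missed %d returns\n", demo_krp.nmissed);
	}

	module_init(demo_init);
	module_exit(demo_exit);
	MODULE_LICENSE("GPL");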
1970 struct kprobe *kp; in kill_kprobe() local
1978 list_for_each_entry_rcu(kp, &p->list, list) in kill_kprobe()
1979 kp->flags |= KPROBE_FLAG_GONE; in kill_kprobe()
1992 int disable_kprobe(struct kprobe *kp) in disable_kprobe() argument
1999 if (__disable_kprobe(kp) == NULL) in disable_kprobe()
2008 int enable_kprobe(struct kprobe *kp) in enable_kprobe() argument
2016 p = __get_valid_kprobe(kp); in enable_kprobe()
2022 if (kprobe_gone(kp)) { in enable_kprobe()
2028 if (p != kp) in enable_kprobe()
2029 kp->flags &= ~KPROBE_FLAG_DISABLED; in enable_kprobe()
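disable_kprobe() and enable_kprobe() (lines 1992-2029) toggle KPROBE_FLAG_DISABLED on a probe that stays registered, which is far cheaper than a full unregister/re-register cycle. A hedged usage sketch (the helper is hypothetical):

	/*
	 * Temporarily mute an already registered probe, then re-arm it.
	 * Both calls return 0 on success or a negative errno (e.g. if
	 * the probe is not actually registered).
	 */
	static int demo_pause_resume(struct kprobe *kp)
	{
		int ret;

		ret = disable_kprobe(kp);	/* handlers stop firing */
		if (ret)
			return ret;

		/* ... probe stays registered here, but disarmed ... */

		return enable_kprobe(kp);	/* handlers fire again */
	}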
2041 void dump_kprobe(struct kprobe *kp) in dump_kprobe() argument
2045 kp->symbol_name, kp->addr, kp->offset); in dump_kprobe()
2238 struct kprobe *p, *kp; in show_kprobe_addr() local
2250 list_for_each_entry_rcu(kp, &p->list, list) in show_kprobe_addr()
2251 report_probe(pi, kp, sym, offset, modname, p); in show_kprobe_addr()
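show_kprobe_addr() is the seq_file ->show callback behind the debugfs listing (typically /sys/kernel/debug/kprobes/list, given CONFIG_DEBUG_FS): for an aggregate probe, i.e. one whose ->list is non-empty, it walks the chained kprobes with the same list_for_each_entry_rcu() pattern used by the handler dispatch loops above and reports each one against the shared symbol, offset and module name.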