Lines matching references to op in kernel/kprobes.c (optimized kprobe / optprobe support). Each hit shows the source line number, the code, and the enclosing function; declaration sites are marked "local".
368 struct optimized_kprobe *op; in free_aggr_kprobe() local
370 op = container_of(p, struct optimized_kprobe, kp); in free_aggr_kprobe()
371 arch_remove_optimized_kprobe(op); in free_aggr_kprobe()
373 kfree(op); in free_aggr_kprobe()
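
Nearly every hit in this listing goes through container_of(p, struct optimized_kprobe, kp): given a pointer to the embedded struct kprobe, it recovers the enclosing struct optimized_kprobe by subtracting the member's offset. A minimal userspace sketch of the idiom (the struct layouts are simplified stand-ins, not the kernel's):

    #include <stddef.h>     /* offsetof */
    #include <stdio.h>

    /* Simplified stand-ins; the real layouts live in <linux/kprobes.h>. */
    struct kprobe { void *addr; };
    struct optimized_kprobe {
        int optinsn;            /* placeholder for the arch optinsn slot */
        struct kprobe kp;       /* embedded member, as in the kernel */
    };

    /* container_of: step back from a member pointer to the enclosing
     * structure by subtracting the member's offset. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    int main(void)
    {
        struct optimized_kprobe op = { .optinsn = 42 };
        struct kprobe *p = &op.kp;                 /* only the inner pointer */
        struct optimized_kprobe *back =
            container_of(p, struct optimized_kprobe, kp);

        printf("%d\n", back->optinsn);             /* prints 42 */
        return 0;
    }

This is also why free_aggr_kprobe() above frees the recovered outer pointer: passing the embedded kp pointer to kfree() would hand the allocator an address it never returned.
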
379 struct optimized_kprobe *op; in kprobe_optready() local
382 op = container_of(p, struct optimized_kprobe, kp); in kprobe_optready()
383 return arch_prepared_optinsn(&op->optinsn); in kprobe_optready()
392 struct optimized_kprobe *op; in kprobe_disarmed() local
398 op = container_of(p, struct optimized_kprobe, kp); in kprobe_disarmed()
400 return kprobe_disabled(p) && list_empty(&op->list); in kprobe_disarmed()
406 struct optimized_kprobe *op; in kprobe_queued() local
409 op = container_of(p, struct optimized_kprobe, kp); in kprobe_queued()
410 if (!list_empty(&op->list)) in kprobe_queued()
424 struct optimized_kprobe *op; in get_optimized_kprobe() local
431 op = container_of(p, struct optimized_kprobe, kp); in get_optimized_kprobe()
432 if (arch_within_optimized_kprobe(op, addr)) in get_optimized_kprobe()
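
get_optimized_kprobe() asks arch_within_optimized_kprobe() whether addr falls inside the instruction window that an optimized probe has replaced with a jump. The real check is arch-specific; as a hedged sketch, assuming a fixed-size replaced window starting at the probe address (OPT_WINDOW_SIZE is a hypothetical stand-in for the arch constant, e.g. the size of a relative jump on x86):

    #include <assert.h>

    /* Hypothetical stand-in for the arch-specific replaced-window length. */
    #define OPT_WINDOW_SIZE 5UL

    static int within_optimized_window(unsigned long probe_addr,
                                       unsigned long addr)
    {
        /* Half-open interval: [probe_addr, probe_addr + OPT_WINDOW_SIZE) */
        return addr >= probe_addr && addr < probe_addr + OPT_WINDOW_SIZE;
    }

    int main(void)
    {
        assert(within_optimized_window(0x1000, 0x1002));   /* inside       */
        assert(!within_optimized_window(0x1000, 0x1005));  /* one past end */
        return 0;
    }
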
482 struct optimized_kprobe *op, *tmp; in do_unoptimize_kprobes() local
493 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_unoptimize_kprobes()
495 if (kprobe_disabled(&op->kp)) in do_unoptimize_kprobes()
496 arch_disarm_kprobe(&op->kp); in do_unoptimize_kprobes()
497 if (kprobe_unused(&op->kp)) { in do_unoptimize_kprobes()
503 hlist_del_rcu(&op->kp.hlist); in do_unoptimize_kprobes()
505 list_del_init(&op->list); in do_unoptimize_kprobes()
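
do_unoptimize_kprobes() walks freeing_list with list_for_each_entry_safe(), the variant that caches the next node before the loop body runs, so the current entry can be unlinked mid-iteration. A self-contained userspace sketch of the idiom, with a minimal reimplementation of the kernel's intrusive list (simplified; the real helpers live in <linux/list.h>):

    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *next, *prev; };

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))
    #define list_entry(ptr, type, member) container_of(ptr, type, member)

    static void INIT_LIST_HEAD(struct list_head *h) { h->next = h->prev = h; }

    static void list_add(struct list_head *n, struct list_head *head)
    {
        n->next = head->next;
        n->prev = head;
        head->next->prev = n;
        head->next = n;
    }

    static void list_del_init(struct list_head *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
        INIT_LIST_HEAD(n);   /* node self-linked: list_empty() is true again */
    }

    struct item { int id; struct list_head list; };

    int main(void)
    {
        struct list_head freeing = { &freeing, &freeing };  /* empty head */
        struct item a = { .id = 1 }, b = { .id = 2 };

        list_add(&a.list, &freeing);
        list_add(&b.list, &freeing);

        /* The "_safe" idiom: remember the next node before the body runs,
         * so unlinking the current entry cannot derail the walk. */
        struct list_head *pos, *tmp;
        for (pos = freeing.next; tmp = pos->next, pos != &freeing; pos = tmp) {
            struct item *it = list_entry(pos, struct item, list);
            printf("freeing item %d\n", it->id);
            list_del_init(pos);
        }
        return 0;
    }
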
514 struct optimized_kprobe *op, *tmp; in do_free_cleaned_kprobes() local
516 list_for_each_entry_safe(op, tmp, &freeing_list, list) { in do_free_cleaned_kprobes()
517 BUG_ON(!kprobe_unused(&op->kp)); in do_free_cleaned_kprobes()
518 list_del_init(&op->list); in do_free_cleaned_kprobes()
519 free_aggr_kprobe(&op->kp); in do_free_cleaned_kprobes()
587 struct optimized_kprobe *op; in optimize_kprobe() local
598 op = container_of(p, struct optimized_kprobe, kp); in optimize_kprobe()
601 if (arch_check_optimized_kprobe(op) < 0) in optimize_kprobe()
605 if (op->kp.flags & KPROBE_FLAG_OPTIMIZED) in optimize_kprobe()
607 op->kp.flags |= KPROBE_FLAG_OPTIMIZED; in optimize_kprobe()
609 if (!list_empty(&op->list)) in optimize_kprobe()
611 list_del_init(&op->list); in optimize_kprobe()
613 list_add(&op->list, &optimizing_list); in optimize_kprobe()
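
The !list_empty(&op->list) tests in optimize_kprobe(), and in kprobe_disarmed() and kprobe_queued() above, all mean "is this probe queued on some work list?". That only works because removals in this file use list_del_init(), which relinks the node to itself, rather than plain list_del(), which would leave the pointers unusable for the test. A compact sketch of the distinction, reusing the same minimal list helpers as in the previous example:

    #include <assert.h>

    struct list_head { struct list_head *next, *prev; };

    static void INIT_LIST_HEAD(struct list_head *h) { h->next = h->prev = h; }
    static int  list_empty(const struct list_head *h) { return h->next == h; }

    static void list_add(struct list_head *n, struct list_head *head)
    {
        n->next = head->next;
        n->prev = head;
        head->next->prev = n;
        head->next = n;
    }

    /* Unlink AND re-initialize, so the node keeps answering
     * "am I queued anywhere?" via list_empty(). */
    static void list_del_init(struct list_head *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
        INIT_LIST_HEAD(n);
    }

    int main(void)
    {
        struct list_head unoptimizing = { &unoptimizing, &unoptimizing };
        struct list_head node;

        INIT_LIST_HEAD(&node);
        assert(list_empty(&node));       /* not queued */

        list_add(&node, &unoptimizing);
        assert(!list_empty(&node));      /* queued for unoptimization */

        list_del_init(&node);            /* optimize_kprobe() cancels it */
        assert(list_empty(&node));       /* immediately reusable for list_add() */
        return 0;
    }

Cancelling a pending unoptimization is therefore just list_del_init(); the node can go straight back onto optimizing_list.
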
619 static void force_unoptimize_kprobe(struct optimized_kprobe *op) in force_unoptimize_kprobe() argument
622 arch_unoptimize_kprobe(op); in force_unoptimize_kprobe()
624 if (kprobe_disabled(&op->kp)) in force_unoptimize_kprobe()
625 arch_disarm_kprobe(&op->kp); in force_unoptimize_kprobe()
631 struct optimized_kprobe *op; in unoptimize_kprobe() local
636 op = container_of(p, struct optimized_kprobe, kp); in unoptimize_kprobe()
639 if (force && !list_empty(&op->list)) { in unoptimize_kprobe()
645 list_del_init(&op->list); in unoptimize_kprobe()
646 force_unoptimize_kprobe(op); in unoptimize_kprobe()
651 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in unoptimize_kprobe()
652 if (!list_empty(&op->list)) { in unoptimize_kprobe()
654 list_del_init(&op->list); in unoptimize_kprobe()
660 force_unoptimize_kprobe(op); in unoptimize_kprobe()
662 list_add(&op->list, &unoptimizing_list); in unoptimize_kprobe()
670 struct optimized_kprobe *op; in reuse_unused_kprobe() local
677 op = container_of(ap, struct optimized_kprobe, kp); in reuse_unused_kprobe()
678 if (unlikely(list_empty(&op->list))) in reuse_unused_kprobe()
691 struct optimized_kprobe *op; in kill_optimized_kprobe() local
693 op = container_of(p, struct optimized_kprobe, kp); in kill_optimized_kprobe()
694 if (!list_empty(&op->list)) in kill_optimized_kprobe()
696 list_del_init(&op->list); in kill_optimized_kprobe()
697 op->kp.flags &= ~KPROBE_FLAG_OPTIMIZED; in kill_optimized_kprobe()
701 list_add(&op->list, &freeing_list); in kill_optimized_kprobe()
707 hlist_del_rcu(&op->kp.hlist); in kill_optimized_kprobe()
711 arch_remove_optimized_kprobe(op); in kill_optimized_kprobe()
717 struct optimized_kprobe *op; in prepare_optimized_kprobe() local
719 op = container_of(p, struct optimized_kprobe, kp); in prepare_optimized_kprobe()
720 arch_prepare_optimized_kprobe(op, p); in prepare_optimized_kprobe()
726 struct optimized_kprobe *op; in alloc_aggr_kprobe() local
728 op = kzalloc(sizeof(struct optimized_kprobe), GFP_KERNEL); in alloc_aggr_kprobe()
729 if (!op) in alloc_aggr_kprobe()
732 INIT_LIST_HEAD(&op->list); in alloc_aggr_kprobe()
733 op->kp.addr = p->addr; in alloc_aggr_kprobe()
734 arch_prepare_optimized_kprobe(op, p); in alloc_aggr_kprobe()
736 return &op->kp; in alloc_aggr_kprobe()
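
alloc_aggr_kprobe() allocates the wider optimized_kprobe but hands back only &op->kp, so callers treat it as an ordinary struct kprobe; free_aggr_kprobe() later undoes this through container_of(). A userspace sketch of that allocate-outer/return-inner pattern (calloc() standing in for kzalloc(..., GFP_KERNEL); names are illustrative):

    #include <stddef.h>
    #include <stdlib.h>

    struct kprobe { void *addr; };
    struct optimized_kprobe {
        long pad;                 /* placeholder fields before the member */
        struct kprobe kp;         /* embedded, as in the kernel */
    };

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* Allocate the wider structure, hand back only the embedded kprobe:
     * callers keep the generic type; the container stays recoverable. */
    static struct kprobe *alloc_wrapped(void *addr)
    {
        struct optimized_kprobe *op = calloc(1, sizeof(*op)); /* ~kzalloc */
        if (!op)
            return NULL;
        op->kp.addr = addr;
        return &op->kp;
    }

    int main(void)
    {
        struct kprobe *p = alloc_wrapped((void *)0x1234);
        if (!p)
            return 1;
        /* Mirror of free_aggr_kprobe(): free the *outer* allocation. */
        free(container_of(p, struct optimized_kprobe, kp));
        return 0;
    }
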
748 struct optimized_kprobe *op; in try_to_optimize_kprobe() local
762 op = container_of(ap, struct optimized_kprobe, kp); in try_to_optimize_kprobe()
763 if (!arch_prepared_optinsn(&op->optinsn)) { in try_to_optimize_kprobe()
765 arch_remove_optimized_kprobe(op); in try_to_optimize_kprobe()
766 kfree(op); in try_to_optimize_kprobe()
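
The tail of try_to_optimize_kprobe() is the graceful-degradation path: if arch_prepared_optinsn() reports that preparing the detour buffer failed, the aggregator is torn down and freed, and the probe simply keeps running unoptimized. A hedged sketch of that shape (the names and the failure signal are illustrative, not the kernel's):

    #include <stdlib.h>

    /* Illustrative stand-in: "prepared" means the costly setup step
     * produced a usable buffer, as arch_prepared_optinsn() checks. */
    struct wrapped {
        char *detour;                    /* ~ op->optinsn */
    };

    static struct wrapped *try_optimize(void)
    {
        struct wrapped *w = calloc(1, sizeof(*w));
        if (!w)
            return NULL;

        w->detour = malloc(64);          /* ~ arch prepare step */
        if (!w->detour) {                /* ~ !arch_prepared_optinsn(...) */
            free(w);                     /* ~ arch_remove... + kfree(op) */
            return NULL;                 /* caller falls back, not an error */
        }
        return w;
    }

    int main(void)
    {
        struct wrapped *w = try_optimize();
        if (w) {
            free(w->detour);
            free(w);
        }
        return 0;
    }
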