Lines matching refs: op
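(These are references to the identifier op, a struct optimized_kprobe pointer, in the x86 jump-optimized kprobes code; judging by the function names and by helpers such as text_poke_bp() and get_optinsn_slot(), the source appears to be arch/x86/kernel/kprobes/opt.c in an older kernel tree. The number at the start of each entry is the line number in that file; "local" and "argument" mark whether op is a local variable or a parameter at that point.)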
44 struct optimized_kprobe *op; in __recover_optprobed_insn() local
53 op = container_of(kp, struct optimized_kprobe, kp); in __recover_optprobed_insn()
55 if (list_empty(&op->list)) in __recover_optprobed_insn()
70 memcpy(buf + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE); in __recover_optprobed_insn()
73 memcpy(buf, op->optinsn.copied_insn + offs, RELATIVE_ADDR_SIZE - offs); in __recover_optprobed_insn()
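The entries above are from __recover_optprobed_insn(), which rebuilds the original instruction bytes for an address that falls inside a jump-optimized probe: the probed first byte is preserved in kp->opcode, and the four bytes clobbered by the jump's displacement are preserved in op->optinsn.copied_insn (list_empty(&op->list) means the optimization has actually been committed). A minimal sketch of the recovery step around lines 70/73, assuming the mainline context (RELATIVE_ADDR_SIZE is 4):

    if (addr == (unsigned long)kp->addr) {
            /* Probe point itself: first byte lives in kp->opcode, the
             * displacement bytes in op->optinsn.copied_insn. */
            buf[0] = kp->opcode;
            memcpy(buf + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE);
    } else {
            /* Address inside the jump's displacement: copy the saved tail. */
            offs = addr - (unsigned long)kp->addr - 1;
            memcpy(buf, op->optinsn.copied_insn + offs,
                   RELATIVE_ADDR_SIZE - offs);
    }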
142 optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs) in optimized_callback() argument
148 if (kprobe_disabled(&op->kp)) in optimized_callback()
153 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
162 regs->ip = (unsigned long)op->kp.addr + INT3_SIZE; in optimized_callback()
165 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
167 opt_pre_handler(&op->kp, regs); in optimized_callback()
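optimized_callback() is the C function the per-probe trampoline calls, with op passed as its first argument. Reading the lines above in order: a disabled probe (e.g. one queued for delayed unoptimization) is ignored; a hit while another kprobe is running only bumps the missed counter; otherwise the saved registers are doctored so handlers see the same state as an int3 trap (ip = kp.addr + INT3_SIZE), current_kprobe is published, and the pre-handlers run. A sketch, with the cs/gs/orig_ax fixups of the real function omitted:

    static void optimized_callback(struct optimized_kprobe *op,
                                   struct pt_regs *regs)
    {
            unsigned long flags;

            if (kprobe_disabled(&op->kp))   /* delayed-unoptimize window */
                    return;

            local_irq_save(flags);
            if (kprobe_running()) {
                    kprobes_inc_nmissed_count(&op->kp);     /* nested hit */
            } else {
                    /* Make regs look like an int3 trap at the probe point. */
                    regs->ip = (unsigned long)op->kp.addr + INT3_SIZE;
                    __this_cpu_write(current_kprobe, &op->kp);
                    opt_pre_handler(&op->kp, regs);
                    __this_cpu_write(current_kprobe, NULL);
            }
            local_irq_restore(flags);
    }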
284 int arch_check_optimized_kprobe(struct optimized_kprobe *op) in arch_check_optimized_kprobe() argument
289 for (i = 1; i < op->optinsn.size; i++) { in arch_check_optimized_kprobe()
290 p = get_kprobe(op->kp.addr + i); in arch_check_optimized_kprobe()
299 int arch_within_optimized_kprobe(struct optimized_kprobe *op, in arch_within_optimized_kprobe() argument
302 return ((unsigned long)op->kp.addr <= addr && in arch_within_optimized_kprobe()
303 (unsigned long)op->kp.addr + op->optinsn.size > addr); in arch_within_optimized_kprobe()
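arch_check_optimized_kprobe() refuses the optimization if any other enabled kprobe sits inside the region whose instructions will be relocated, and arch_within_optimized_kprobe() is the matching half-open range test over [kp.addr, kp.addr + optinsn.size). A sketch of the check loop, assuming the usual -EEXIST return on conflict:

    int arch_check_optimized_kprobe(struct optimized_kprobe *op)
    {
            struct kprobe *p;
            int i;

            /* Byte 0 is op itself; any enabled probe on bytes 1..size-1
             * would be swallowed by the relocated region. */
            for (i = 1; i < op->optinsn.size; i++) {
                    p = get_kprobe(op->kp.addr + i);
                    if (p && !kprobe_disabled(p))
                            return -EEXIST;
            }
            return 0;
    }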
308 void __arch_remove_optimized_kprobe(struct optimized_kprobe *op, int dirty) in __arch_remove_optimized_kprobe() argument
310 if (op->optinsn.insn) { in __arch_remove_optimized_kprobe()
311 free_optinsn_slot(op->optinsn.insn, dirty); in __arch_remove_optimized_kprobe()
312 op->optinsn.insn = NULL; in __arch_remove_optimized_kprobe()
313 op->optinsn.size = 0; in __arch_remove_optimized_kprobe()
317 void arch_remove_optimized_kprobe(struct optimized_kprobe *op) in arch_remove_optimized_kprobe() argument
319 __arch_remove_optimized_kprobe(op, 1); in arch_remove_optimized_kprobe()
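The dirty argument threaded through here records whether the detour slot may have been executed: arch_remove_optimized_kprobe() passes 1 for a slot that was (or may have been) live, so free_optinsn_slot() can defer its reuse, while the error paths in arch_prepare_optimized_kprobe() below pass 0 for a buffer that was never patched in.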
327 int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, in arch_prepare_optimized_kprobe() argument
334 if (!can_optimize((unsigned long)op->kp.addr)) in arch_prepare_optimized_kprobe()
337 op->optinsn.insn = get_optinsn_slot(); in arch_prepare_optimized_kprobe()
338 if (!op->optinsn.insn) in arch_prepare_optimized_kprobe()
345 rel = (long)op->optinsn.insn - (long)op->kp.addr + RELATIVEJUMP_SIZE; in arch_prepare_optimized_kprobe()
347 __arch_remove_optimized_kprobe(op, 0); in arch_prepare_optimized_kprobe()
351 buf = (u8 *)op->optinsn.insn; in arch_prepare_optimized_kprobe()
354 ret = copy_optimized_instructions(buf + TMPL_END_IDX, op->kp.addr); in arch_prepare_optimized_kprobe()
356 __arch_remove_optimized_kprobe(op, 0); in arch_prepare_optimized_kprobe()
359 op->optinsn.size = ret; in arch_prepare_optimized_kprobe()
365 synthesize_set_arg1(buf + TMPL_MOVE_IDX, (unsigned long)op); in arch_prepare_optimized_kprobe()
371 synthesize_reljump(buf + TMPL_END_IDX + op->optinsn.size, in arch_prepare_optimized_kprobe()
372 (u8 *)op->kp.addr + op->optinsn.size); in arch_prepare_optimized_kprobe()
376 op->optinsn.size + RELATIVEJUMP_SIZE); in arch_prepare_optimized_kprobe()
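arch_prepare_optimized_kprobe() assembles the per-probe detour buffer: a template prologue that saves registers, a synthesized "set first argument to op" sequence, a call into optimized_callback() (the call site, TMPL_CALL_IDX in mainline, is not visible in this listing), then the relocated original instructions, then a jump back past the probe. The slot must also sit within a signed 32-bit displacement of the probe, because the site will be patched with a 5-byte rel32 jmp. A sketch of the layout and the range check:

    /*
     * Layout assembled in buf = (u8 *)op->optinsn.insn:
     *
     *   buf + 0                 template prologue (save registers)
     *   buf + TMPL_MOVE_IDX     set arg1 = op        (synthesize_set_arg1)
     *   ...                     call optimized_callback
     *   buf + TMPL_END_IDX      relocated instructions (optinsn.size bytes)
     *   buf + TMPL_END_IDX
     *       + optinsn.size      jmp kp.addr + optinsn.size (synthesize_reljump)
     */

    /* rel32 reach check: the gap must fit in +/- 2GB. */
    rel = (long)op->optinsn.insn - (long)op->kp.addr + RELATIVEJUMP_SIZE;
    if (abs(rel) > 0x7fffffff) {
            __arch_remove_optimized_kprobe(op, 0);  /* never activated */
            return -ERANGE;
    }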
386 struct optimized_kprobe *op, *tmp; in arch_optimize_kprobes() local
389 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_optimize_kprobes()
390 s32 rel = (s32)((long)op->optinsn.insn - in arch_optimize_kprobes()
391 ((long)op->kp.addr + RELATIVEJUMP_SIZE)); in arch_optimize_kprobes()
393 WARN_ON(kprobe_disabled(&op->kp)); in arch_optimize_kprobes()
396 memcpy(op->optinsn.copied_insn, op->kp.addr + INT3_SIZE, in arch_optimize_kprobes()
402 text_poke_bp(op->kp.addr, insn_buf, RELATIVEJUMP_SIZE, in arch_optimize_kprobes()
403 op->optinsn.insn); in arch_optimize_kprobes()
405 list_del_init(&op->list); in arch_optimize_kprobes()
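arch_optimize_kprobes() commits each pending probe. The probe site already starts with the kprobe's int3, so only the four bytes after it need backing up before the whole 5-byte window is rewritten as a jmp to the detour buffer; text_poke_bp() gets op->optinsn.insn as the address where a CPU that hits the transient breakpoint during patching is parked. A sketch of building the patch, assuming RELATIVEJUMP_OPCODE is 0xe9 (jmp rel32):

    u8 insn_buf[RELATIVEJUMP_SIZE];
    s32 rel = (s32)((long)op->optinsn.insn -
                    ((long)op->kp.addr + RELATIVEJUMP_SIZE));

    /* Save the bytes the displacement will overwrite (byte 0 is int3). */
    memcpy(op->optinsn.copied_insn, op->kp.addr + INT3_SIZE,
           RELATIVE_ADDR_SIZE);

    insn_buf[0] = RELATIVEJUMP_OPCODE;      /* 0xe9 */
    *(s32 *)(&insn_buf[1]) = rel;

    text_poke_bp(op->kp.addr, insn_buf, RELATIVEJUMP_SIZE,
                 op->optinsn.insn);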
410 void arch_unoptimize_kprobe(struct optimized_kprobe *op) in arch_unoptimize_kprobe() argument
416 memcpy(insn_buf + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE); in arch_unoptimize_kprobe()
417 text_poke_bp(op->kp.addr, insn_buf, RELATIVEJUMP_SIZE, in arch_unoptimize_kprobe()
418 op->optinsn.insn); in arch_unoptimize_kprobe()
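arch_unoptimize_kprobe() is the inverse patch: byte 0 becomes int3 again, so the probe keeps firing as an ordinary breakpoint kprobe, and the saved displacement bytes are restored behind it. A sketch, assuming BREAKPOINT_INSTRUCTION is 0xcc:

    u8 insn_buf[RELATIVEJUMP_SIZE];

    insn_buf[0] = BREAKPOINT_INSTRUCTION;   /* 0xcc: back to a plain kprobe */
    memcpy(insn_buf + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE);
    text_poke_bp(op->kp.addr, insn_buf, RELATIVEJUMP_SIZE,
                 op->optinsn.insn);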
428 struct optimized_kprobe *op, *tmp; in arch_unoptimize_kprobes() local
430 list_for_each_entry_safe(op, tmp, oplist, list) { in arch_unoptimize_kprobes()
431 arch_unoptimize_kprobe(op); in arch_unoptimize_kprobes()
432 list_move(&op->list, done_list); in arch_unoptimize_kprobes()
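The batch variant walks the pending list, patches each probe back to int3, and moves it to done_list, presumably so the core optimizer can free the now-dirty detour slots only after it has made sure no CPU can still be executing inside them.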
438 struct optimized_kprobe *op; in setup_detour_execution() local
442 op = container_of(p, struct optimized_kprobe, kp); in setup_detour_execution()
444 regs->ip = (unsigned long)op->optinsn.insn + TMPL_END_IDX; in setup_detour_execution()
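setup_detour_execution() covers hits that still arrive via int3 on an optimized probe (for instance during the optimize/unoptimize transitions): instead of single-stepping, the trap handler resumes execution at TMPL_END_IDX inside the detour buffer, i.e. directly at the relocated copy of the probed instructions, whose tail jumps back to kp.addr + optinsn.size. A sketch, assuming the mainline reenter and preemption handling:

    int setup_detour_execution(struct kprobe *p, struct pt_regs *regs,
                               int reenter)
    {
            struct optimized_kprobe *op;

            if (p->flags & KPROBE_FLAG_OPTIMIZED) {
                    op = container_of(p, struct optimized_kprobe, kp);
                    /* Resume at the relocated instructions; no single-step. */
                    regs->ip = (unsigned long)op->optinsn.insn + TMPL_END_IDX;
                    if (!reenter)
                            reset_current_kprobe();
                    preempt_enable_no_resched();
                    return 1;
            }
            return 0;       /* not optimized: fall back to single-stepping */
    }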