Lines matching refs: ea (in arch/powerpc/lib/sstep.c)
38 extern int do_lfs(int rn, unsigned long ea);
39 extern int do_lfd(int rn, unsigned long ea);
40 extern int do_stfs(int rn, unsigned long ea);
41 extern int do_stfd(int rn, unsigned long ea);
42 extern int do_lvx(int rn, unsigned long ea);
43 extern int do_stvx(int rn, unsigned long ea);
44 extern int do_lxvd2x(int rn, unsigned long ea);
45 extern int do_stxvd2x(int rn, unsigned long ea);
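
These eight externs are the assembler helpers (implemented in arch/powerpc/lib/ldstfp.S in mainline, as far as I know) that actually move data between memory at ea and FP/VMX/VSX register rn. The C code reaches them through function pointers, as the do_fp_load()/do_vec_load()/do_vsx_load() calls further down (lines 1873-1889) show. A minimal sketch of that pattern; the typedef name ldst_fp_t and the wrapper emulate_lfd_sketch() are mine, not from the source:

/* Sketch only: common shape of the helpers declared above; rn is the
 * target register number, ea the effective address, return 0 or -EFAULT. */
typedef int (*ldst_fp_t)(int rn, unsigned long ea);

static int emulate_lfd_sketch(int rn, unsigned long ea, struct pt_regs *regs)
{
	ldst_fp_t func = do_lfd;	/* asm helper for an 8-byte FP load */

	/* do_fp_load() takes the helper as a callback (cf. line 1875) */
	return do_fp_load(rn, func, ea, 8, regs);
}
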
84 static long __kprobes address_ok(struct pt_regs *regs, unsigned long ea, int nb) in address_ok() argument
88 return __access_ok(ea, nb, USER_DS); in address_ok()
97 unsigned long ea; in dform_ea() local
100 ea = (signed short) instr; /* sign-extend */ in dform_ea()
102 ea += regs->gpr[ra]; in dform_ea()
104 return truncate_if_32bit(regs->msr, ea); in dform_ea()
114 unsigned long ea; in dsform_ea() local
117 ea = (signed short) (instr & ~3); /* sign-extend */ in dsform_ea()
119 ea += regs->gpr[ra]; in dsform_ea()
121 return truncate_if_32bit(regs->msr, ea); in dsform_ea()
132 unsigned long ea; in xform_ea() local
136 ea = regs->gpr[rb]; in xform_ea()
138 ea += regs->gpr[ra]; in xform_ea()
140 return truncate_if_32bit(regs->msr, ea); in xform_ea()
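
Folding the dform_ea() fragments (lines 97-104) back together gives the hedged reconstruction below; it is not verbatim source, and the RA-field extraction is my assumption based on the usual D-form layout. dsform_ea() is the same except that it masks off the two low displacement bits (line 117), while xform_ea() adds GPR[RB] and, if RA is non-zero, GPR[RA] (lines 136-138).

/* Reconstruction: EA = (RA|0) + sign-extended 16-bit displacement,
 * clipped to 32 bits when the MSR says the task runs in 32-bit mode. */
static unsigned long dform_ea_sketch(unsigned int instr, struct pt_regs *regs)
{
	int ra = (instr >> 16) & 0x1f;	/* assumed RA field */
	unsigned long ea;

	ea = (signed short) instr;	/* sign-extend the displacement */
	if (ra)				/* RA == 0 means "no base register" */
		ea += regs->gpr[ra];

	return truncate_if_32bit(regs->msr, ea);
}
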
172 static int __kprobes read_mem_aligned(unsigned long *dest, unsigned long ea, in read_mem_aligned() argument
180 err = __get_user(x, (unsigned char __user *) ea); in read_mem_aligned()
183 err = __get_user(x, (unsigned short __user *) ea); in read_mem_aligned()
186 err = __get_user(x, (unsigned int __user *) ea); in read_mem_aligned()
190 err = __get_user(x, (unsigned long __user *) ea); in read_mem_aligned()
199 static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea, in read_mem_unaligned() argument
215 c = max_align(ea); in read_mem_unaligned()
219 err = read_mem_aligned(&b, ea, c); in read_mem_unaligned()
223 ea += c; in read_mem_unaligned()
250 static int __kprobes read_mem(unsigned long *dest, unsigned long ea, int nb, in read_mem() argument
253 if (!address_ok(regs, ea, nb)) in read_mem()
255 if ((ea & (nb - 1)) == 0) in read_mem()
256 return read_mem_aligned(dest, ea, nb); in read_mem()
257 return read_mem_unaligned(dest, ea, nb, regs); in read_mem()
260 static int __kprobes write_mem_aligned(unsigned long val, unsigned long ea, in write_mem_aligned() argument
267 err = __put_user(val, (unsigned char __user *) ea); in write_mem_aligned()
270 err = __put_user(val, (unsigned short __user *) ea); in write_mem_aligned()
273 err = __put_user(val, (unsigned int __user *) ea); in write_mem_aligned()
277 err = __put_user(val, (unsigned long __user *) ea); in write_mem_aligned()
284 static int __kprobes write_mem_unaligned(unsigned long val, unsigned long ea, in write_mem_unaligned() argument
311 c = max_align(ea); in write_mem_unaligned()
315 err = write_mem_aligned(val >> (nb - c) * 8, ea, c); in write_mem_unaligned()
318 ea += c; in write_mem_unaligned()
327 static int __kprobes write_mem(unsigned long val, unsigned long ea, int nb, in write_mem() argument
330 if (!address_ok(regs, ea, nb)) in write_mem()
332 if ((ea & (nb - 1)) == 0) in write_mem()
333 return write_mem_aligned(val, ea, nb); in write_mem()
334 return write_mem_unaligned(val, ea, nb, regs); in write_mem()
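
read_mem() and write_mem() (lines 250-257 and 327-334) are visible almost in full: validate the address, do a single aligned access when the EA is naturally aligned for the size, otherwise fall back to the piece-wise routines. Only fragments of those fall-backs survive here, so the following is a hedged reconstruction of the write side built from lines 311-318; max_align() is assumed to return the largest naturally aligned power-of-two chunk usable at that point, and read_mem_unaligned() mirrors the same loop while assembling the pieces into *dest:

static int write_mem_unaligned_sketch(unsigned long val, unsigned long ea,
				      int nb, struct pt_regs *regs)
{
	unsigned long c;
	int err;

	/* emit the most-significant piece first (big-endian layout implied
	 * by the "val >> (nb - c) * 8" shift on line 315) */
	for (; nb > 0; nb -= c, ea += c) {
		c = max_align(ea);		/* largest chunk aligned at ea */
		if (c > nb)
			c = max_align(nb);	/* don't write past the access */
		err = write_mem_aligned(val >> (nb - c) * 8, ea, c);
		if (err)
			return err;
	}
	return 0;
}
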
343 unsigned long ea, int nb, in do_fp_load() argument
363 if (!address_ok(regs, ea, nb)) in do_fp_load()
365 if ((ea & 3) == 0) in do_fp_load()
366 return (*func)(rn, ea); in do_fp_load()
369 err = read_mem_unaligned(&data.ul[0], ea, nb, regs); in do_fp_load()
374 err = read_mem_unaligned(&data.ul[0], ea, 4, regs); in do_fp_load()
376 err = read_mem_unaligned(&data.ul[1], ea + 4, 4, regs); in do_fp_load()
384 unsigned long ea, int nb, in do_fp_store() argument
404 if (!address_ok(regs, ea, nb)) in do_fp_store()
406 if ((ea & 3) == 0) in do_fp_store()
407 return (*func)(rn, ea); in do_fp_store()
415 err = write_mem_unaligned(data.ul[0], ea, nb, regs); in do_fp_store()
421 err = write_mem_unaligned(data.ul[0], ea, 4, regs); in do_fp_store()
423 err = write_mem_unaligned(data.ul[1], ea + 4, 4, regs); in do_fp_store()
432 unsigned long ea, struct pt_regs *regs) in do_vec_load() argument
434 if (!address_ok(regs, ea & ~0xfUL, 16)) in do_vec_load()
436 return (*func)(rn, ea); in do_vec_load()
440 unsigned long ea, struct pt_regs *regs) in do_vec_store() argument
442 if (!address_ok(regs, ea & ~0xfUL, 16)) in do_vec_store()
444 return (*func)(rn, ea); in do_vec_store()
450 unsigned long ea, struct pt_regs *regs) in do_vsx_load() argument
455 if (!address_ok(regs, ea, 16)) in do_vsx_load()
457 if ((ea & 3) == 0) in do_vsx_load()
458 return (*func)(rn, ea); in do_vsx_load()
459 err = read_mem_unaligned(&val[0], ea, 8, regs); in do_vsx_load()
461 err = read_mem_unaligned(&val[1], ea + 8, 8, regs); in do_vsx_load()
468 unsigned long ea, struct pt_regs *regs) in do_vsx_store() argument
473 if (!address_ok(regs, ea, 16)) in do_vsx_store()
475 if ((ea & 3) == 0) in do_vsx_store()
476 return (*func)(rn, ea); in do_vsx_store()
480 err = write_mem_unaligned(val[0], ea, 8, regs); in do_vsx_store()
482 err = write_mem_unaligned(val[1], ea + 8, 8, regs); in do_vsx_store()
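
do_fp_load()/do_fp_store() (lines 343-423), do_vec_load()/do_vec_store() (432-444) and do_vsx_load()/do_vsx_store() (450-482) all follow one pattern: validate the EA, hand a sufficiently aligned EA straight to the asm helper, and otherwise bounce the data through an aligned kernel buffer via the unaligned read/write routines before pointing the helper at that buffer. The VMX variants simply mask the EA down to a 16-byte boundary, as lvx/stvx do in hardware. A condensed, hypothetical sketch of the load direction; the real FP routine additionally has to place a 4-byte single-precision value correctly inside its union (lines 374-376), which is glossed over here:

static int fpvsx_load_sketch(int rn, int (*func)(int, unsigned long),
			     unsigned long ea, int nb, struct pt_regs *regs)
{
	unsigned long buf[2];		/* big enough for a 16-byte VSX value */
	int err;

	if (!address_ok(regs, ea, nb))
		return -EFAULT;
	if ((ea & 3) == 0)		/* aligned enough: helper reads memory */
		return (*func)(rn, ea);

	/* unaligned: pull the bytes in via the piece-wise reader, then let
	 * the asm helper load from the aligned kernel buffer instead */
	err = read_mem_unaligned(&buf[0], ea, (nb <= 8) ? nb : 8, regs);
	if (!err && nb == 16)
		err = read_mem_unaligned(&buf[1], ea + 8, 8, regs);
	if (err)
		return err;
	return (*func)(rn, (unsigned long) &buf[0]);
}
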
1300 op->ea = xform_ea(instr, regs); in analyse_instr()
1305 op->ea = xform_ea(instr, regs); in analyse_instr()
1310 op->ea = xform_ea(instr, regs); in analyse_instr()
1316 op->ea = xform_ea(instr, regs); in analyse_instr()
1322 op->ea = xform_ea(instr, regs); in analyse_instr()
1340 op->ea = xform_ea(instr, regs); in analyse_instr()
1448 op->ea = 0; in analyse_instr()
1450 op->ea = truncate_if_32bit(regs->msr, in analyse_instr()
1504 op->ea = 0; in analyse_instr()
1506 op->ea = truncate_if_32bit(regs->msr, in analyse_instr()
1543 op->ea = dform_ea(instr, regs); in analyse_instr()
1549 op->ea = dform_ea(instr, regs); in analyse_instr()
1555 op->ea = dform_ea(instr, regs); in analyse_instr()
1561 op->ea = dform_ea(instr, regs); in analyse_instr()
1567 op->ea = dform_ea(instr, regs); in analyse_instr()
1573 op->ea = dform_ea(instr, regs); in analyse_instr()
1579 op->ea = dform_ea(instr, regs); in analyse_instr()
1586 op->ea = dform_ea(instr, regs); in analyse_instr()
1591 op->ea = dform_ea(instr, regs); in analyse_instr()
1600 op->ea = dform_ea(instr, regs); in analyse_instr()
1608 op->ea = dform_ea(instr, regs); in analyse_instr()
1616 op->ea = dform_ea(instr, regs); in analyse_instr()
1624 op->ea = dform_ea(instr, regs); in analyse_instr()
1630 op->ea = dsform_ea(instr, regs); in analyse_instr()
1645 op->ea = dsform_ea(instr, regs); in analyse_instr()
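
Inside analyse_instr() these lines do exactly one thing: pick the EA former that matches the instruction format. The indexed (X-form) ops around lines 1300-1340 use xform_ea(), the D-form loads and stores at lines 1543-1624 use dform_ea(), the DS-form ld/std family at 1630-1645 uses dsform_ea(), and a couple of cases (1448-1450, 1504-1506) set op->ea directly to 0 or to a truncated register value. A hypothetical, condensed example of what one such case boils down to; MKOP() and LOAD are the op-type macros from asm/sstep.h, the rd extraction is my assumption, and the real cases also encode update/sign-extension flags:

static void analyse_lwz_sketch(struct instruction_op *op, unsigned int instr,
			       struct pt_regs *regs)
{
	int rd = (instr >> 21) & 0x1f;		/* assumed RT field */

	/* record what to do, where from, and how wide; emulate_step()
	 * performs the actual access later */
	op->type = MKOP(LOAD, 0, 4);		/* plain 4-byte load, no flags */
	op->reg = rd;
	op->ea = dform_ea(instr, regs);
}
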
1711 static __kprobes int handle_stack_update(unsigned long ea, struct pt_regs *regs) in handle_stack_update() argument
1717 if (ea - STACK_INT_FRAME_SIZE <= current->thread.ksp_limit) { in handle_stack_update()
1783 if (!address_ok(regs, op.ea, 8)) in emulate_step()
1787 __cacheop_user_asmx(op.ea, err, "dcbst"); in emulate_step()
1790 __cacheop_user_asmx(op.ea, err, "dcbf"); in emulate_step()
1794 prefetchw((void *) op.ea); in emulate_step()
1798 prefetch((void *) op.ea); in emulate_step()
1801 __cacheop_user_asmx(op.ea, err, "icbi"); in emulate_step()
1811 if (op.ea & (size - 1)) in emulate_step()
1814 if (!address_ok(regs, op.ea, size)) in emulate_step()
1819 __get_user_asmx(val, op.ea, err, "lwarx"); in emulate_step()
1822 __get_user_asmx(val, op.ea, err, "ldarx"); in emulate_step()
1834 if (op.ea & (size - 1)) in emulate_step()
1837 if (!address_ok(regs, op.ea, size)) in emulate_step()
1842 __put_user_asmx(op.val, op.ea, err, "stwcx.", cr); in emulate_step()
1845 __put_user_asmx(op.val, op.ea, err, "stdcx.", cr); in emulate_step()
1859  			err = read_mem(&regs->gpr[op.reg], op.ea, size, regs); in emulate_step()
1873 err = do_fp_load(op.reg, do_lfs, op.ea, size, regs); in emulate_step()
1875 err = do_fp_load(op.reg, do_lfd, op.ea, size, regs); in emulate_step()
1882 err = do_vec_load(op.reg, do_lvx, op.ea & ~0xfUL, regs); in emulate_step()
1889 err = do_vsx_load(op.reg, do_lxvd2x, op.ea, regs); in emulate_step()
1900  			err = read_mem(&regs->gpr[rd], op.ea, nb, regs); in emulate_step()
1905 op.ea += 4; in emulate_step()
1916 op.ea >= regs->gpr[1] - STACK_INT_FRAME_SIZE) { in emulate_step()
1917 err = handle_stack_update(op.ea, regs); in emulate_step()
1920 err = write_mem(op.val, op.ea, size, regs); in emulate_step()
1928 err = do_fp_store(op.reg, do_stfs, op.ea, size, regs); in emulate_step()
1930 err = do_fp_store(op.reg, do_stfd, op.ea, size, regs); in emulate_step()
1937 err = do_vec_store(op.reg, do_stvx, op.ea & ~0xfUL, regs); in emulate_step()
1944 err = do_vsx_store(op.reg, do_stxvd2x, op.ea, regs); in emulate_step()
1958 err = write_mem(val, op.ea, nb, regs); in emulate_step()
1961 op.ea += 4; in emulate_step()
2010 regs->gpr[op.update_reg] = op.ea; in emulate_step()
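
The emulate_step() fragments show where op.ea is finally consumed: cache ops and lwarx/ldarx/stwcx./stdcx. are re-executed on op.ea itself after alignment and address_ok() checks (lines 1783-1845); ordinary loads and stores go through read_mem()/write_mem() or the FP/VMX/VSX wrappers (1859-1944); the multi-word load/store loops advance op.ea by 4 per register (1905, 1961); a store whose EA falls within STACK_INT_FRAME_SIZE of the current stack pointer is routed through handle_stack_update() (1916-1917); and update-form instructions write the final EA back into the update register (2010). A hedged reconstruction of just the plain LOAD path, folded together from the 1859 and 2010 fragments; the _sketch name is mine, and the real code also handles sign-extension and byte-reversal flags:

static int emulate_load_sketch(struct instruction_op op, int size,
			       struct pt_regs *regs)
{
	int err = read_mem(&regs->gpr[op.reg], op.ea, size, regs);

	if (err)
		return 0;	/* could not emulate; caller single-steps for real */
	if (op.type & UPDATE)	/* update forms, e.g. lwzu: RA <- EA
				 * (UPDATE flag from asm/sstep.h) */
		regs->gpr[op.update_reg] = op.ea;
	/* on success the emulated instruction is retired by bumping NIP */
	regs->nip = truncate_if_32bit(regs->msr, regs->nip + 4);
	return 1;
}
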