Rn                677 arch/arm/mm/alignment.c 			u32 Rn = (tinstr & (7<<8)) >> 8;
Rn                678 arch/arm/mm/alignment.c 			u32 W = ((L<<Rn) & (tinstr&255)) ? 0 : 1<<21;
Rn                680 arch/arm/mm/alignment.c 			return 0xe8800000 | W | (L<<20) | (Rn<<16) |
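The three alignment.c hits above are the Thumb-to-ARM LDM/STM fix-up in thumb2arm(); a standalone worked example of the arithmetic (mine, not from the file), for the Thumb encoding 0xC105 = stmia r1!, {r0, r2}:

	u32 tinstr = 0xC105;				/* stmia r1!, {r0, r2}		*/
	u32 L = 0;					/* store, not load		*/
	u32 Rn = (tinstr & (7 << 8)) >> 8;		/* = 1, base register r1	*/
	u32 W = ((L << Rn) & (tinstr & 255)) ? 0 : 1 << 21;	/* = 1 << 21 (writeback) */
	/* 0xe8800000 | W | (L << 20) | (Rn << 16) | (tinstr & 255) = 0xe8a10005,
	 * which is the ARM encoding of stmia r1!, {r0, r2}
	 */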
Rn                445 arch/arm64/include/asm/insn.h 				       enum aarch64_insn_register Rn,
Rn                450 arch/arm64/include/asm/insn.h 			  enum aarch64_insn_register Rn,
Rn               1604 arch/arm64/kernel/insn.c 				       enum aarch64_insn_register Rn,
Rn               1629 arch/arm64/kernel/insn.c 	insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, Rn);
Rn               1635 arch/arm64/kernel/insn.c 			  enum aarch64_insn_register Rn,
Rn               1661 arch/arm64/kernel/insn.c 	insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, Rn);
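In the insn.c encoders above, Rn ends up in the standard A64 Rn field; a minimal sketch of what AARCH64_INSN_REGTYPE_RN means in practice (illustrative helper name, not the kernel's static encoder):

	/* In most A64 encodings the first source register Rn occupies
	 * bits [9:5], so encoding it is a masked insert at shift 5.
	 */
	static u32 encode_rn_field(u32 insn, u32 reg)
	{
		insn &= ~(0x1fU << 5);		/* clear the Rn field	*/
		insn |= (reg & 0x1f) << 5;	/* place the register	*/
		return insn;
	}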
Rn                 51 arch/arm64/net/bpf_jit.h #define A64_BR(Rn)  aarch64_insn_gen_branch_reg(Rn, AARCH64_INSN_BRANCH_NOLINK)
Rn                 52 arch/arm64/net/bpf_jit.h #define A64_BLR(Rn) aarch64_insn_gen_branch_reg(Rn, AARCH64_INSN_BRANCH_LINK)
Rn                 53 arch/arm64/net/bpf_jit.h #define A64_RET(Rn) aarch64_insn_gen_branch_reg(Rn, AARCH64_INSN_BRANCH_RETURN)
Rn                 56 arch/arm64/net/bpf_jit.h #define A64_LS_REG(Rt, Rn, Rm, size, type) \
Rn                 57 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_load_store_reg(Rt, Rn, Rm, \
Rn                 70 arch/arm64/net/bpf_jit.h #define A64_LS_PAIR(Rt, Rt2, Rn, offset, ls, type) \
Rn                 71 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_load_store_pair(Rt, Rt2, Rn, offset, \
Rn                 75 arch/arm64/net/bpf_jit.h #define A64_PUSH(Rt, Rt2, Rn) A64_LS_PAIR(Rt, Rt2, Rn, -16, STORE, PRE_INDEX)
Rn                 77 arch/arm64/net/bpf_jit.h #define A64_POP(Rt, Rt2, Rn)  A64_LS_PAIR(Rt, Rt2, Rn, 16, LOAD, POST_INDEX)
Rn                 82 arch/arm64/net/bpf_jit.h #define A64_LSX(sf, Rt, Rn, Rs, type) \
Rn                 83 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
Rn                 86 arch/arm64/net/bpf_jit.h #define A64_LDXR(sf, Rt, Rn) \
Rn                 87 arch/arm64/net/bpf_jit.h 	A64_LSX(sf, Rt, Rn, A64_ZR, LOAD_EX)
Rn                 89 arch/arm64/net/bpf_jit.h #define A64_STXR(sf, Rt, Rn, Rs) \
Rn                 90 arch/arm64/net/bpf_jit.h 	A64_LSX(sf, Rt, Rn, Rs, STORE_EX)
Rn                 93 arch/arm64/net/bpf_jit.h #define A64_STADD(sf, Rn, Rs) \
Rn                 94 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_stadd(Rn, Rs, A64_SIZE(sf))
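A64_LDXR/A64_STXR and A64_STADD give the BPF JIT two ways to lower an atomic add; a hedged sketch of the usual pattern (lse_atomics, isdw, dst, src, tmp, tmp2 and the emit()/ctx helpers are the JIT's own context in bpf_jit_comp.c, named here only for illustration):

	if (lse_atomics) {
		/* single LSE instruction: *(dst) += src */
		emit(A64_STADD(isdw, dst, src), ctx);
	} else {
		/* exclusive-monitor retry loop */
		emit(A64_LDXR(isdw, tmp, dst), ctx);		/* tmp = *(dst)		*/
		emit(A64_ADD(isdw, tmp, tmp, src), ctx);	/* tmp += src		*/
		emit(A64_STXR(isdw, tmp, dst, tmp2), ctx);	/* try *(dst) = tmp	*/
		emit(A64_CBNZ(0, tmp2, -3), ctx);		/* loop back on failure	*/
	}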
Rn                 97 arch/arm64/net/bpf_jit.h #define A64_ADDSUB_IMM(sf, Rd, Rn, imm12, type) \
Rn                 98 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_add_sub_imm(Rd, Rn, imm12, \
Rn                101 arch/arm64/net/bpf_jit.h #define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
Rn                102 arch/arm64/net/bpf_jit.h #define A64_SUB_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB)
Rn                104 arch/arm64/net/bpf_jit.h #define A64_MOV(sf, Rd, Rn) A64_ADD_I(sf, Rd, Rn, 0)
Rn                107 arch/arm64/net/bpf_jit.h #define A64_BITFIELD(sf, Rd, Rn, immr, imms, type) \
Rn                108 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_bitfield(Rd, Rn, immr, imms, \
Rn                111 arch/arm64/net/bpf_jit.h #define A64_SBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, SIGNED)
Rn                113 arch/arm64/net/bpf_jit.h #define A64_UBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, UNSIGNED)
Rn                116 arch/arm64/net/bpf_jit.h #define A64_LSL(sf, Rd, Rn, shift) ({	\
Rn                118 arch/arm64/net/bpf_jit.h 	A64_UBFM(sf, Rd, Rn, (unsigned)-(shift) % sz, sz - 1 - (shift)); \
Rn                121 arch/arm64/net/bpf_jit.h #define A64_LSR(sf, Rd, Rn, shift) A64_UBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
Rn                123 arch/arm64/net/bpf_jit.h #define A64_ASR(sf, Rd, Rn, shift) A64_SBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
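The (unsigned)-(shift) % sz expression in A64_LSL maps a left shift onto UBFM; a standalone worked example of that arithmetic:

	/* 64-bit LSL #3:
	 *   immr = (unsigned)-3 % 64 = 61,  imms = 64 - 1 - 3 = 60
	 * i.e. UBFM Rd, Rn, #61, #60, the architectural alias of LSL #3
	 */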
Rn                126 arch/arm64/net/bpf_jit.h #define A64_UXTH(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 15)
Rn                127 arch/arm64/net/bpf_jit.h #define A64_UXTW(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 31)
Rn                141 arch/arm64/net/bpf_jit.h #define A64_ADDSUB_SREG(sf, Rd, Rn, Rm, type) \
Rn                142 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_add_sub_shifted_reg(Rd, Rn, Rm, 0, \
Rn                145 arch/arm64/net/bpf_jit.h #define A64_ADD(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
Rn                146 arch/arm64/net/bpf_jit.h #define A64_SUB(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB)
Rn                147 arch/arm64/net/bpf_jit.h #define A64_SUBS(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB_SETFLAGS)
Rn                151 arch/arm64/net/bpf_jit.h #define A64_CMP(sf, Rn, Rm) A64_SUBS(sf, A64_ZR, Rn, Rm)
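A64_CMP (like A64_TST further down) only sets flags; in the JIT it is normally paired with a conditional-branch macro from the same header, roughly as below (dst, src, jmp_offset and emit()/ctx are the JIT's own state; A64_B_ and A64_COND_GT are the header's conditional-branch helpers):

	emit(A64_CMP(1, dst, src), ctx);		/* flags = dst - src	*/
	emit(A64_B_(A64_COND_GT, jmp_offset), ctx);	/* taken if dst > src	*/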
Rn                154 arch/arm64/net/bpf_jit.h #define A64_DATA1(sf, Rd, Rn, type) aarch64_insn_gen_data1(Rd, Rn, \
Rn                157 arch/arm64/net/bpf_jit.h #define A64_REV16(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_16)
Rn                158 arch/arm64/net/bpf_jit.h #define A64_REV32(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_32)
Rn                159 arch/arm64/net/bpf_jit.h #define A64_REV64(Rd, Rn)     A64_DATA1(1, Rd, Rn, REVERSE_64)
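The REV* macros pair with the zero-extends above for BPF's endian conversions; roughly what the JIT emits for a 16-bit byte swap (is64, dst and emit()/ctx assumed from the JIT context):

	emit(A64_REV16(is64, dst, dst), ctx);	/* swap the low two bytes	*/
	emit(A64_UXTH(is64, dst, dst), ctx);	/* clear bits 63:16		*/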
Rn                163 arch/arm64/net/bpf_jit.h #define A64_DATA2(sf, Rd, Rn, Rm, type) aarch64_insn_gen_data2(Rd, Rn, Rm, \
Rn                165 arch/arm64/net/bpf_jit.h #define A64_UDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, UDIV)
Rn                166 arch/arm64/net/bpf_jit.h #define A64_LSLV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSLV)
Rn                167 arch/arm64/net/bpf_jit.h #define A64_LSRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSRV)
Rn                168 arch/arm64/net/bpf_jit.h #define A64_ASRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, ASRV)
Rn                172 arch/arm64/net/bpf_jit.h #define A64_MADD(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
Rn                175 arch/arm64/net/bpf_jit.h #define A64_MSUB(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
Rn                178 arch/arm64/net/bpf_jit.h #define A64_MUL(sf, Rd, Rn, Rm) A64_MADD(sf, Rd, A64_ZR, Rn, Rm)
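UDIV plus MSUB is enough to synthesize a modulo, which is roughly how recent bpf_jit_comp.c lowers BPF_MOD (register names are the JIT's, shown for illustration):

	emit(A64_UDIV(is64, tmp, dst, src), ctx);	/* tmp = dst / src		*/
	emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);	/* dst = dst - tmp * src	*/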
Rn                181 arch/arm64/net/bpf_jit.h #define A64_LOGIC_SREG(sf, Rd, Rn, Rm, type) \
Rn                182 arch/arm64/net/bpf_jit.h 	aarch64_insn_gen_logical_shifted_reg(Rd, Rn, Rm, 0, \
Rn                185 arch/arm64/net/bpf_jit.h #define A64_AND(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND)
Rn                186 arch/arm64/net/bpf_jit.h #define A64_ORR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, ORR)
Rn                187 arch/arm64/net/bpf_jit.h #define A64_EOR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, EOR)
Rn                188 arch/arm64/net/bpf_jit.h #define A64_ANDS(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND_SETFLAGS)
Rn                190 arch/arm64/net/bpf_jit.h #define A64_TST(sf, Rn, Rm) A64_ANDS(sf, A64_ZR, Rn, Rm)
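As a closing usage sketch for this header, the JIT prologue in bpf_jit_comp.c composes several of the macros above (A64_FP/A64_LR/A64_SP are the JIT's register aliases; emit()/ctx are its compilation context):

	/* save frame pointer and link register, then set up the new frame */
	emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx);
	emit(A64_MOV(1, A64_FP, A64_SP), ctx);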
Rn                210 arch/sh/math-emu/math.c 		WRITE(FRm, Rn + R0 + 4);
Rn                212 arch/sh/math-emu/math.c 		WRITE(FRm, Rn + R0);
Rn                214 arch/sh/math-emu/math.c 		WRITE(FRm, Rn + R0);
Rn                226 arch/sh/math-emu/math.c 		WRITE(FRm, Rn + 4);
Rn                228 arch/sh/math-emu/math.c 		WRITE(FRm, Rn);
Rn                230 arch/sh/math-emu/math.c 		WRITE(FRm, Rn);
Rn                242 arch/sh/math-emu/math.c 		Rn -= 8;
Rn                243 arch/sh/math-emu/math.c 		WRITE(FRm, Rn + 4);
Rn                245 arch/sh/math-emu/math.c 		WRITE(FRm, Rn);
Rn                247 arch/sh/math-emu/math.c 		Rn -= 4;
Rn                248 arch/sh/math-emu/math.c 		WRITE(FRm, Rn);
Rn                439 arch/sh/math-emu/math.c 		Rn = *reg;
Rn                443 arch/sh/math-emu/math.c 		*reg = Rn;
Rn                447 arch/sh/math-emu/math.c 		Rn -= 4;
Rn                448 arch/sh/math-emu/math.c 		WRITE(*reg, Rn);
Rn                452 arch/sh/math-emu/math.c 		READ(*reg, Rn);
Rn                453 arch/sh/math-emu/math.c 		Rn += 4;
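In math.c, Rn is not a local variable but a macro over the saved user register file (roughly #define Rn (regs->regs[n]) near the top of the file), so the sequences above emulate SH addressing modes directly on the trapped context; for example, the pre-decrement store of a system register:

	/* emulating  sts.l FPUL, @-Rn  from the listing above */
	Rn -= 4;		/* regs->regs[n] -= 4: pre-decrement the base	*/
	WRITE(*reg, Rn);	/* put_user() the value at the new address	*/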