rs1 167 arch/riscv/net/bpf_jit_comp.c static u32 rv_r_insn(u8 funct7, u8 rs2, u8 rs1, u8 funct3, u8 rd, u8 opcode)
rs1 169 arch/riscv/net/bpf_jit_comp.c return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
rs1 173 arch/riscv/net/bpf_jit_comp.c static u32 rv_i_insn(u16 imm11_0, u8 rs1, u8 funct3, u8 rd, u8 opcode)
rs1 175 arch/riscv/net/bpf_jit_comp.c return (imm11_0 << 20) | (rs1 << 15) | (funct3 << 12) | (rd << 7) |
rs1 179 arch/riscv/net/bpf_jit_comp.c static u32 rv_s_insn(u16 imm11_0, u8 rs2, u8 rs1, u8 funct3, u8 opcode)
rs1 183 arch/riscv/net/bpf_jit_comp.c return (imm11_5 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
rs1 187 arch/riscv/net/bpf_jit_comp.c static u32 rv_sb_insn(u16 imm12_1, u8 rs2, u8 rs1, u8 funct3, u8 opcode)
rs1 192 arch/riscv/net/bpf_jit_comp.c return (imm12 << 25) | (rs2 << 20) | (rs1 << 15) | (funct3 << 12) |
rs1 211 arch/riscv/net/bpf_jit_comp.c static u32 rv_amo_insn(u8 funct5, u8 aq, u8 rl, u8 rs2, u8 rs1,
rs1 216 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(funct7, rs2, rs1, funct3, rd, opcode);
rs1 219 arch/riscv/net/bpf_jit_comp.c static u32 rv_addiw(u8 rd, u8 rs1, u16 imm11_0)
rs1 221 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 0, rd, 0x1b);
rs1 224 arch/riscv/net/bpf_jit_comp.c static u32 rv_addi(u8 rd, u8 rs1, u16 imm11_0)
rs1 226 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 0, rd, 0x13);
rs1 229 arch/riscv/net/bpf_jit_comp.c static u32 rv_addw(u8 rd, u8 rs1, u8 rs2)
rs1 231 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 0, rd, 0x3b);
rs1 234 arch/riscv/net/bpf_jit_comp.c static u32 rv_add(u8 rd, u8 rs1, u8 rs2)
rs1 236 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 0, rd, 0x33);
rs1 239 arch/riscv/net/bpf_jit_comp.c static u32 rv_subw(u8 rd, u8 rs1, u8 rs2)
rs1 241 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0x20, rs2, rs1, 0, rd, 0x3b);
rs1 244 arch/riscv/net/bpf_jit_comp.c static u32 rv_sub(u8 rd, u8 rs1, u8 rs2)
rs1 246 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0x20, rs2, rs1, 0, rd, 0x33);
rs1 249 arch/riscv/net/bpf_jit_comp.c static u32 rv_and(u8 rd, u8 rs1, u8 rs2)
rs1 251 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 7, rd, 0x33);
rs1 254 arch/riscv/net/bpf_jit_comp.c static u32 rv_or(u8 rd, u8 rs1, u8 rs2)
rs1 256 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 6, rd, 0x33);
rs1 259 arch/riscv/net/bpf_jit_comp.c static u32 rv_xor(u8 rd, u8 rs1, u8 rs2)
rs1 261 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 4, rd, 0x33);
rs1 264 arch/riscv/net/bpf_jit_comp.c static u32 rv_mulw(u8 rd, u8 rs1, u8 rs2)
rs1 266 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(1, rs2, rs1, 0, rd, 0x3b);
rs1 269 arch/riscv/net/bpf_jit_comp.c static u32 rv_mul(u8 rd, u8 rs1, u8 rs2)
rs1 271 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(1, rs2, rs1, 0, rd, 0x33);
rs1 274 arch/riscv/net/bpf_jit_comp.c static u32 rv_divuw(u8 rd, u8 rs1, u8 rs2)
rs1 276 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(1, rs2, rs1, 5, rd, 0x3b);
rs1 279 arch/riscv/net/bpf_jit_comp.c static u32 rv_divu(u8 rd, u8 rs1, u8 rs2)
rs1 281 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(1, rs2, rs1, 5, rd, 0x33);
rs1 284 arch/riscv/net/bpf_jit_comp.c static u32 rv_remuw(u8 rd, u8 rs1, u8 rs2)
rs1 286 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(1, rs2, rs1, 7, rd, 0x3b);
rs1 289 arch/riscv/net/bpf_jit_comp.c static u32 rv_remu(u8 rd, u8 rs1, u8 rs2)
rs1 291 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(1, rs2, rs1, 7, rd, 0x33);
rs1 294 arch/riscv/net/bpf_jit_comp.c static u32 rv_sllw(u8 rd, u8 rs1, u8 rs2)
rs1 296 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 1, rd, 0x3b);
rs1 299 arch/riscv/net/bpf_jit_comp.c static u32 rv_sll(u8 rd, u8 rs1, u8 rs2)
rs1 301 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 1, rd, 0x33);
rs1 304 arch/riscv/net/bpf_jit_comp.c static u32 rv_srlw(u8 rd, u8 rs1, u8 rs2)
rs1 306 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 5, rd, 0x3b);
rs1 309 arch/riscv/net/bpf_jit_comp.c static u32 rv_srl(u8 rd, u8 rs1, u8 rs2)
rs1 311 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0, rs2, rs1, 5, rd, 0x33);
rs1 314 arch/riscv/net/bpf_jit_comp.c static u32 rv_sraw(u8 rd, u8 rs1, u8 rs2)
rs1 316 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0x20, rs2, rs1, 5, rd, 0x3b);
rs1 319 arch/riscv/net/bpf_jit_comp.c static u32 rv_sra(u8 rd, u8 rs1, u8 rs2)
rs1 321 arch/riscv/net/bpf_jit_comp.c return rv_r_insn(0x20, rs2, rs1, 5, rd, 0x33);
rs1 329 arch/riscv/net/bpf_jit_comp.c static u32 rv_slli(u8 rd, u8 rs1, u16 imm11_0)
rs1 331 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 1, rd, 0x13);
rs1 334 arch/riscv/net/bpf_jit_comp.c static u32 rv_andi(u8 rd, u8 rs1, u16 imm11_0)
rs1 336 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 7, rd, 0x13);
rs1 339 arch/riscv/net/bpf_jit_comp.c static u32 rv_ori(u8 rd, u8 rs1, u16 imm11_0)
rs1 341 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 6, rd, 0x13);
rs1 344 arch/riscv/net/bpf_jit_comp.c static u32 rv_xori(u8 rd, u8 rs1, u16 imm11_0)
rs1 346 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 4, rd, 0x13);
rs1 349 arch/riscv/net/bpf_jit_comp.c static u32 rv_slliw(u8 rd, u8 rs1, u16 imm11_0)
rs1 351 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 1, rd, 0x1b);
rs1 354 arch/riscv/net/bpf_jit_comp.c static u32 rv_srliw(u8 rd, u8 rs1, u16 imm11_0)
rs1 356 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 5, rd, 0x1b);
rs1 359 arch/riscv/net/bpf_jit_comp.c static u32 rv_srli(u8 rd, u8 rs1, u16 imm11_0)
rs1 361 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 5, rd, 0x13);
rs1 364 arch/riscv/net/bpf_jit_comp.c static u32 rv_sraiw(u8 rd, u8 rs1, u16 imm11_0)
rs1 366 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(0x400 | imm11_0, rs1, 5, rd, 0x1b);
rs1 369 arch/riscv/net/bpf_jit_comp.c static u32 rv_srai(u8 rd, u8 rs1, u16 imm11_0)
rs1 371 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(0x400 | imm11_0, rs1, 5, rd, 0x13);
rs1 379 arch/riscv/net/bpf_jit_comp.c static u32 rv_jalr(u8 rd, u8 rs1, u16 imm11_0)
rs1 381 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 0, rd, 0x67);
rs1 384 arch/riscv/net/bpf_jit_comp.c static u32 rv_beq(u8 rs1, u8 rs2, u16 imm12_1)
rs1 386 arch/riscv/net/bpf_jit_comp.c return rv_sb_insn(imm12_1, rs2, rs1, 0, 0x63);
rs1 389 arch/riscv/net/bpf_jit_comp.c static u32 rv_bltu(u8 rs1, u8 rs2, u16 imm12_1)
rs1 391 arch/riscv/net/bpf_jit_comp.c return rv_sb_insn(imm12_1, rs2, rs1, 6, 0x63);
rs1 394 arch/riscv/net/bpf_jit_comp.c static u32 rv_bgeu(u8 rs1, u8 rs2, u16 imm12_1)
rs1 396 arch/riscv/net/bpf_jit_comp.c return rv_sb_insn(imm12_1, rs2, rs1, 7, 0x63);
rs1 399 arch/riscv/net/bpf_jit_comp.c static u32 rv_bne(u8 rs1, u8 rs2, u16 imm12_1)
rs1 401 arch/riscv/net/bpf_jit_comp.c return rv_sb_insn(imm12_1, rs2, rs1, 1, 0x63);
rs1 404 arch/riscv/net/bpf_jit_comp.c static u32 rv_blt(u8 rs1, u8 rs2, u16 imm12_1)
rs1 406 arch/riscv/net/bpf_jit_comp.c return rv_sb_insn(imm12_1, rs2, rs1, 4, 0x63);
rs1 409 arch/riscv/net/bpf_jit_comp.c static u32 rv_bge(u8 rs1, u8 rs2, u16 imm12_1)
rs1 411 arch/riscv/net/bpf_jit_comp.c return rv_sb_insn(imm12_1, rs2, rs1, 5, 0x63);
rs1 414 arch/riscv/net/bpf_jit_comp.c static u32 rv_sb(u8 rs1, u16 imm11_0, u8 rs2)
rs1 416 arch/riscv/net/bpf_jit_comp.c return rv_s_insn(imm11_0, rs2, rs1, 0, 0x23);
rs1 419 arch/riscv/net/bpf_jit_comp.c static u32 rv_sh(u8 rs1, u16 imm11_0, u8 rs2)
rs1 421 arch/riscv/net/bpf_jit_comp.c return rv_s_insn(imm11_0, rs2, rs1, 1, 0x23);
rs1 424 arch/riscv/net/bpf_jit_comp.c static u32 rv_sw(u8 rs1, u16 imm11_0, u8 rs2)
rs1 426 arch/riscv/net/bpf_jit_comp.c return rv_s_insn(imm11_0, rs2, rs1, 2, 0x23);
rs1 429 arch/riscv/net/bpf_jit_comp.c static u32 rv_sd(u8 rs1, u16 imm11_0, u8 rs2)
rs1 431 arch/riscv/net/bpf_jit_comp.c return rv_s_insn(imm11_0, rs2, rs1, 3, 0x23);
rs1 434 arch/riscv/net/bpf_jit_comp.c static u32 rv_lbu(u8 rd, u16 imm11_0, u8 rs1)
rs1 436 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 4, rd, 0x03);
rs1 439 arch/riscv/net/bpf_jit_comp.c static u32 rv_lhu(u8 rd, u16 imm11_0, u8 rs1)
rs1 441 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 5, rd, 0x03);
rs1 444 arch/riscv/net/bpf_jit_comp.c static u32 rv_lwu(u8 rd, u16 imm11_0, u8 rs1)
rs1 446 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 6, rd, 0x03);
rs1 449 arch/riscv/net/bpf_jit_comp.c static u32 rv_ld(u8 rd, u16 imm11_0, u8 rs1)
rs1 451 arch/riscv/net/bpf_jit_comp.c return rv_i_insn(imm11_0, rs1, 3, rd, 0x03);
rs1 454 arch/riscv/net/bpf_jit_comp.c static u32 rv_amoadd_w(u8 rd, u8 rs2, u8 rs1, u8 aq, u8 rl)
rs1 456 arch/riscv/net/bpf_jit_comp.c return rv_amo_insn(0, aq, rl, rs2, rs1, 2, rd, 0x2f);
rs1 459 arch/riscv/net/bpf_jit_comp.c static u32 rv_amoadd_d(u8 rd, u8 rs2, u8 rs1, u8 aq, u8 rl)
rs1 461 arch/riscv/net/bpf_jit_comp.c return rv_amo_insn(0, aq, rl, rs2, rs1, 3, rd, 0x2f);
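
The rv_*() helpers indexed above build 32-bit RISC-V instruction words by packing fixed-width fields, e.g. the R-type layout visible at line 169: funct7[31:25], rs2[24:20], rs1[19:15], funct3[14:12], rd[11:7], opcode[6:0]. A minimal standalone sketch of that packing (illustrative only, not the kernel's JIT code; the register numbers in main() are chosen for the example):

#include <stdint.h>
#include <stdio.h>

/* Restatement of the R-type packing shown at line 169 above:
 * funct7 | rs2 | rs1 | funct3 | rd | opcode. */
static uint32_t rv_r_type(uint8_t funct7, uint8_t rs2, uint8_t rs1,
                          uint8_t funct3, uint8_t rd, uint8_t opcode)
{
	return ((uint32_t)funct7 << 25) | ((uint32_t)rs2 << 20) |
	       ((uint32_t)rs1 << 15) | ((uint32_t)funct3 << 12) |
	       ((uint32_t)rd << 7) | opcode;
}

int main(void)
{
	/* "add a0, a1, a2": rd = x10, rs1 = x11, rs2 = x12,
	 * funct7 = 0, funct3 = 0, opcode = 0x33 -> prints 0x00c58533. */
	printf("0x%08x\n", rv_r_type(0, 12, 11, 0, 10, 0x33));
	return 0;
}
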
rs1 71 arch/sparc/kernel/unaligned_32.c static inline void maybe_flush_windows(unsigned int rs1, unsigned int rs2,
rs1 74 arch/sparc/kernel/unaligned_32.c if(rs2 >= 16 || rs1 >= 16 || rd >= 16) {
rs1 138 arch/sparc/kernel/unaligned_32.c unsigned int rs1 = (insn >> 14) & 0x1f;
rs1 143 arch/sparc/kernel/unaligned_32.c maybe_flush_windows(rs1, 0, rd);
rs1 144 arch/sparc/kernel/unaligned_32.c return (fetch_reg(rs1, regs) + sign_extend_imm13(insn));
rs1 146 arch/sparc/kernel/unaligned_32.c maybe_flush_windows(rs1, rs2, rd);
rs1 147 arch/sparc/kernel/unaligned_32.c return (fetch_reg(rs1, regs) + fetch_reg(rs2, regs));
rs1 154 arch/sparc/kernel/unaligned_32.c unsigned int rs1 = (insn >> 14) & 0x1f;
rs1 159 arch/sparc/kernel/unaligned_32.c maybe_flush_windows(rs1, 0, rd);
rs1 160 arch/sparc/kernel/unaligned_32.c return (safe_fetch_reg(rs1, regs) + sign_extend_imm13(insn));
rs1 162 arch/sparc/kernel/unaligned_32.c maybe_flush_windows(rs1, rs2, rd);
rs1 163 arch/sparc/kernel/unaligned_32.c return (safe_fetch_reg(rs1, regs) + safe_fetch_reg(rs2, regs));
rs1 104 arch/sparc/kernel/unaligned_64.c static inline void maybe_flush_windows(unsigned int rs1, unsigned int rs2,
rs1 107 arch/sparc/kernel/unaligned_64.c if (rs2 >= 16 || rs1 >= 16 || rd >= 16) {
rs1 173 arch/sparc/kernel/unaligned_64.c unsigned int rs1 = (insn >> 14) & 0x1f;
rs1 178 arch/sparc/kernel/unaligned_64.c maybe_flush_windows(rs1, 0, rd, from_kernel);
rs1 179 arch/sparc/kernel/unaligned_64.c addr = (fetch_reg(rs1, regs) + sign_extend_imm13(insn));
rs1 181 arch/sparc/kernel/unaligned_64.c maybe_flush_windows(rs1, rs2, rd, from_kernel);
rs1 182 arch/sparc/kernel/unaligned_64.c addr = (fetch_reg(rs1, regs) + fetch_reg(rs2, regs));
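
In the SPARC unaligned-access handlers above, rs1 is decoded from bits 18:14 of the trapping instruction and the effective address is either rs1 + sign-extended simm13 or rs1 + rs2 (lines 143-147, 159-163, 178-182). A hedged standalone sketch of that computation; get_reg() is a hypothetical stand-in for the kernel's fetch_reg()/safe_fetch_reg() helpers:

#include <stdint.h>

/* Hypothetical register-file accessor standing in for fetch_reg(). */
extern uint64_t get_reg(unsigned int reg);

/* Sign-extend the 13-bit immediate in bits 12:0 of a SPARC format-3 word. */
static int64_t sign_extend_imm13_sketch(uint32_t insn)
{
	return (int32_t)(insn << 19) >> 19;
}

/* Effective address as implied by the lines indexed above:
 * rs1 + simm13 when the i bit (bit 13) is set, else rs1 + rs2. */
uint64_t effective_address_sketch(uint32_t insn)
{
	unsigned int rs1 = (insn >> 14) & 0x1f;
	unsigned int rs2 = insn & 0x1f;

	if (insn & 0x2000)
		return get_reg(rs1) + sign_extend_imm13_sketch(insn);
	return get_reg(rs1) + get_reg(rs2);
}
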
rs1 140 arch/sparc/kernel/visemul.c static inline void maybe_flush_windows(unsigned int rs1, unsigned int rs2,
rs1 143 arch/sparc/kernel/visemul.c if (rs2 >= 16 || rs1 >= 16 || rd >= 16) {
rs1 296 arch/sparc/kernel/visemul.c unsigned long orig_rs1, rs1, orig_rs2, rs2, rd_val;
rs1 300 arch/sparc/kernel/visemul.c orig_rs1 = rs1 = fetch_reg(RS1(insn), regs);
rs1 304 arch/sparc/kernel/visemul.c rs1 = rs1 & 0xffffffff;
rs1 311 arch/sparc/kernel/visemul.c left = edge8_tab[rs1 & 0x7].left;
rs1 316 arch/sparc/kernel/visemul.c left = edge8_tab_l[rs1 & 0x7].left;
rs1 322 arch/sparc/kernel/visemul.c left = edge16_tab[(rs1 >> 1) & 0x3].left;
rs1 328 arch/sparc/kernel/visemul.c left = edge16_tab_l[(rs1 >> 1) & 0x3].left;
rs1 334 arch/sparc/kernel/visemul.c left = edge32_tab[(rs1 >> 2) & 0x1].left;
rs1 340 arch/sparc/kernel/visemul.c left = edge32_tab_l[(rs1 >> 2) & 0x1].left;
rs1 345 arch/sparc/kernel/visemul.c if ((rs1 & ~0x7UL) == (rs2 & ~0x7UL))
rs1 374 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, rd_val;
rs1 378 arch/sparc/kernel/visemul.c rs1 = fetch_reg(RS1(insn), regs);
rs1 384 arch/sparc/kernel/visemul.c rd_val = ((((rs1 >> 11) & 0x3) << 0) |
rs1 385 arch/sparc/kernel/visemul.c (((rs1 >> 33) & 0x3) << 2) |
rs1 386 arch/sparc/kernel/visemul.c (((rs1 >> 55) & 0x1) << 4) |
rs1 387 arch/sparc/kernel/visemul.c (((rs1 >> 13) & 0xf) << 5) |
rs1 388 arch/sparc/kernel/visemul.c (((rs1 >> 35) & 0xf) << 9) |
rs1 389 arch/sparc/kernel/visemul.c (((rs1 >> 56) & 0xf) << 13) |
rs1 390 arch/sparc/kernel/visemul.c (((rs1 >> 17) & bits_mask) << 17) |
rs1 391 arch/sparc/kernel/visemul.c (((rs1 >> 39) & bits_mask) << (17 + bits)) |
rs1 392 arch/sparc/kernel/visemul.c (((rs1 >> 60) & 0xf) << (17 + (2*bits))));
rs1 408 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, rd_val, gsr;
rs1 411 arch/sparc/kernel/visemul.c rs1 = fetch_reg(RS1(insn), regs);
rs1 413 arch/sparc/kernel/visemul.c rd_val = rs1 + rs2;
rs1 425 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, rd_val;
rs1 430 arch/sparc/kernel/visemul.c rs1 = fpd_regval(f, RS1(insn));
rs1 439 arch/sparc/kernel/visemul.c byte = (rs1 >> (which * 8)) & 0xff;
rs1 451 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, *rd, rd_val;
rs1 454 arch/sparc/kernel/visemul.c rs1 = fpd_regval(f, RS1(insn));
rs1 463 arch/sparc/kernel/visemul.c s1 = (rs1 >> (56 - (i * 8))) & 0xff;
rs1 480 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, gsr, scale, rd_val;
rs1 510 arch/sparc/kernel/visemul.c rs1 = fpd_regval(f, RS1(insn));
rs1 512 arch/sparc/kernel/visemul.c rd_val = (rs1 << 8) & ~(0x000000ff000000ffUL);
rs1 572 arch/sparc/kernel/visemul.c rs1 = fps_regval(f, RS1(insn));
rs1 576 arch/sparc/kernel/visemul.c ((rs1 & 0x000000ff) << 8) |
rs1 578 arch/sparc/kernel/visemul.c ((rs1 & 0x0000ff00) << 16) |
rs1 580 arch/sparc/kernel/visemul.c ((rs1 & 0x00ff0000) << 24) |
rs1 582 arch/sparc/kernel/visemul.c ((rs1 & 0xff000000) << 32));
rs1 592 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, rd_val;
rs1 598 arch/sparc/kernel/visemul.c rs1 = fps_regval(f, RS1(insn));
rs1 603 arch/sparc/kernel/visemul.c u16 src1 = (rs1 >> (byte * 8)) & 0x00ff;
rs1 623 arch/sparc/kernel/visemul.c rs1 = fps_regval(f, RS1(insn));
rs1 629 arch/sparc/kernel/visemul.c u16 src1 = (rs1 >> (byte * 8)) & 0x00ff;
rs1 647 arch/sparc/kernel/visemul.c rs1 = fpd_regval(f, RS1(insn));
rs1 658 arch/sparc/kernel/visemul.c src1 = ((rs1 >> ((16 * byte) + ushift)) & 0x00ff);
rs1 677 arch/sparc/kernel/visemul.c rs1 = fps_regval(f, RS1(insn));
rs1 688 arch/sparc/kernel/visemul.c src1 = ((rs1 >> ((16 * byte) + ushift)) & 0x00ff);
rs1 708 arch/sparc/kernel/visemul.c unsigned long rs1, rs2, rd_val, i;
rs1 710 arch/sparc/kernel/visemul.c rs1 = fpd_regval(f, RS1(insn));
rs1 718 arch/sparc/kernel/visemul.c s16 a = (rs1 >> (i * 16)) & 0xffff;
rs1 728 arch/sparc/kernel/visemul.c s32 a = (rs1 >> (i * 32)) & 0xffffffff;
rs1 738 arch/sparc/kernel/visemul.c s16 a = (rs1 >> (i * 16)) & 0xffff;
rs1 748 arch/sparc/kernel/visemul.c s32 a = (rs1 >> (i * 32)) & 0xffffffff;
rs1 758 arch/sparc/kernel/visemul.c s16 a = (rs1 >> (i * 16)) & 0xffff;
rs1 768 arch/sparc/kernel/visemul.c s32 a = (rs1 >> (i * 32)) & 0xffffffff;
rs1 778 arch/sparc/kernel/visemul.c s16 a = (rs1 >> (i * 16)) & 0xffff;
rs1 788 arch/sparc/kernel/visemul.c s32 a = (rs1 >> (i * 32)) & 0xffffffff;
rs1 284 arch/sparc/math-emu/math_32.c argp rs1 = NULL, rs2 = NULL, rd = NULL;
rs1 369 arch/sparc/math-emu/math_32.c rs1 = (argp)&fregs[freg];
rs1 371 arch/sparc/math-emu/math_32.c case 7: FP_UNPACK_QP (QA, rs1); break;
rs1 372 arch/sparc/math-emu/math_32.c case 6: FP_UNPACK_DP (DA, rs1); break;
rs1 373 arch/sparc/math-emu/math_32.c case 5: FP_UNPACK_SP (SA, rs1); break;
rs1 359 arch/sparc/math-emu/math_64.c argp rs1 = NULL, rs2 = NULL, rd = NULL;
rs1 380 arch/sparc/math-emu/math_64.c case 1: rs1 = (argp)&f->regs[freg];
rs1 383 arch/sparc/math-emu/math_64.c rs1 = (argp)&zero;
rs1 387 arch/sparc/math-emu/math_64.c case 7: FP_UNPACK_QP (QA, rs1); break;
rs1 388 arch/sparc/math-emu/math_64.c case 6: FP_UNPACK_DP (DA, rs1); break;
rs1 389 arch/sparc/math-emu/math_64.c case 5: FP_UNPACK_SP (SA, rs1); break;
rs1 1002 drivers/edac/pnd2_edac.c daddr->rank |= dnv_get_bit(pmiaddr, dmap[pmiidx].rs1 + 13, 1);
rs1 224 drivers/edac/pnd2_edac.h u32 rs1 : 5;
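
The last two entries come from the Pondicherry2 EDAC driver, where rs1 is a 5-bit bitfield in the driver's address-map descriptor (pnd2_edac.h, line 224) and the call at pnd2_edac.c line 1002 uses it, offset by 13, to pick a single bit of the PMI address for the rank. A hypothetical single-bit extractor with the behaviour that call site suggests (the real dnv_get_bit() is defined inside the driver and may differ in detail):

#include <stdint.h>

/* Hypothetical stand-in for the driver's dnv_get_bit(): take bit 'bit' of
 * 'addr' and return it shifted to bit position 'pos', so the caller can OR
 * it into a field, e.g. rank |= get_bit_sketch(pmiaddr, rs1 + 13, 1); */
static inline uint32_t get_bit_sketch(uint64_t addr, unsigned int bit, unsigned int pos)
{
	return (uint32_t)((addr >> bit) & 1) << pos;
}
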