imm 59 arch/arm/kvm/trace.h TP_PROTO(unsigned long vcpu_pc, unsigned long r0, unsigned long imm), imm 60 arch/arm/kvm/trace.h TP_ARGS(vcpu_pc, r0, imm), imm 65 arch/arm/kvm/trace.h __field( unsigned long, imm ) imm 71 arch/arm/kvm/trace.h __entry->imm = imm; imm 75 arch/arm/kvm/trace.h __entry->vcpu_pc, __entry->r0, __entry->imm) imm 357 arch/arm/net/bpf_jit_32.c u16 imm; imm 383 arch/arm/net/bpf_jit_32.c imm = offset - (8 + ctx->idx * 4); imm 385 arch/arm/net/bpf_jit_32.c if (imm & ~0xfff) { imm 394 arch/arm/net/bpf_jit_32.c return imm; imm 1353 arch/arm/net/bpf_jit_32.c const s32 imm = insn->imm; imm 1360 arch/arm/net/bpf_jit_32.c #define check_imm(bits, imm) do { \ imm 1361 arch/arm/net/bpf_jit_32.c if ((imm) >= (1 << ((bits) - 1)) || \ imm 1362 arch/arm/net/bpf_jit_32.c (imm) < -(1 << ((bits) - 1))) { \ imm 1364 arch/arm/net/bpf_jit_32.c i, imm, imm); \ imm 1368 arch/arm/net/bpf_jit_32.c #define check_imm24(imm) check_imm(24, imm) imm 1380 arch/arm/net/bpf_jit_32.c if (imm == 1) { imm 1389 arch/arm/net/bpf_jit_32.c emit_a32_mov_se_i64(is64, dst, imm, ctx); imm 1438 arch/arm/net/bpf_jit_32.c emit_a32_mov_se_i64(is64, tmp2, imm, ctx); imm 1456 arch/arm/net/bpf_jit_32.c emit_a32_mov_i(rt, imm, ctx); imm 1476 arch/arm/net/bpf_jit_32.c if (unlikely(imm > 31)) imm 1478 arch/arm/net/bpf_jit_32.c if (imm) imm 1479 arch/arm/net/bpf_jit_32.c emit_a32_alu_i(dst_lo, imm, ctx, BPF_OP(code)); imm 1485 arch/arm/net/bpf_jit_32.c if (unlikely(imm > 63)) imm 1487 arch/arm/net/bpf_jit_32.c emit_a32_lsh_i64(dst, imm, ctx); imm 1491 arch/arm/net/bpf_jit_32.c if (unlikely(imm > 63)) imm 1493 arch/arm/net/bpf_jit_32.c emit_a32_rsh_i64(dst, imm, ctx); imm 1509 arch/arm/net/bpf_jit_32.c if (unlikely(imm > 63)) imm 1511 arch/arm/net/bpf_jit_32.c emit_a32_arsh_i64(dst, imm, ctx); imm 1537 arch/arm/net/bpf_jit_32.c emit_a32_mov_se_i64(is64, tmp2, imm, ctx); imm 1549 arch/arm/net/bpf_jit_32.c switch (imm) { imm 1564 arch/arm/net/bpf_jit_32.c switch (imm) { imm 1591 arch/arm/net/bpf_jit_32.c u64 val = (u32)imm | (u64)insn[1].imm << 32; imm 1613 arch/arm/net/bpf_jit_32.c emit_a32_mov_se_i64(true, tmp2, imm, ctx); imm 1618 arch/arm/net/bpf_jit_32.c emit_a32_mov_i(tmp2[1], imm, ctx); imm 1711 arch/arm/net/bpf_jit_32.c emit_a32_mov_se_i64(true, tmp2, imm, ctx); imm 1780 arch/arm/net/bpf_jit_32.c const u32 func = (u32)__bpf_call_base + (u32)imm; imm 161 arch/arm/net/bpf_jit_32.h #define _AL3_I(op, rd, rn, imm) ((op ## _I) | (rd) << 12 | (rn) << 16 | (imm)) imm 167 arch/arm/net/bpf_jit_32.h #define ARM_ADD_I(rd, rn, imm) _AL3_I(ARM_INST_ADD, rd, rn, imm) imm 168 arch/arm/net/bpf_jit_32.h #define ARM_ADDS_I(rd, rn, imm) _AL3_I(ARM_INST_ADDS, rd, rn, imm) imm 170 arch/arm/net/bpf_jit_32.h #define ARM_ADC_I(rd, rn, imm) _AL3_I(ARM_INST_ADC, rd, rn, imm) imm 174 arch/arm/net/bpf_jit_32.h #define ARM_AND_I(rd, rn, imm) _AL3_I(ARM_INST_AND, rd, rn, imm) imm 177 arch/arm/net/bpf_jit_32.h #define ARM_BIC_I(rd, rn, imm) _AL3_I(ARM_INST_BIC, rd, rn, imm) imm 184 arch/arm/net/bpf_jit_32.h #define ARM_CMP_I(rn, imm) _AL3_I(ARM_INST_CMP, 0, rn, imm) imm 187 arch/arm/net/bpf_jit_32.h #define ARM_EOR_I(rd, rn, imm) _AL3_I(ARM_INST_EOR, rd, rn, imm) imm 192 arch/arm/net/bpf_jit_32.h #define ARM_LDR_R_SI(rt, rn, rm, type, imm) \ imm 195 arch/arm/net/bpf_jit_32.h | (imm) << 7 | (type) << 5 | (rm)) imm 207 arch/arm/net/bpf_jit_32.h #define ARM_LSL_I(rd, rn, imm) (_AL3_I(ARM_INST_LSL, rd, 0, rn) | (imm) << 7) imm 210 arch/arm/net/bpf_jit_32.h #define ARM_LSR_I(rd, rn, imm) (_AL3_I(ARM_INST_LSR, rd, 0, rn) | (imm) << 7) imm 212 
arch/arm/net/bpf_jit_32.h #define ARM_ASR_I(rd, rn, imm) (_AL3_I(ARM_INST_ASR, rd, 0, rn) | (imm) << 7) imm 216 arch/arm/net/bpf_jit_32.h #define ARM_MOV_I(rd, imm) _AL3_I(ARM_INST_MOV, rd, 0, imm) imm 222 arch/arm/net/bpf_jit_32.h #define ARM_MOVW(rd, imm) \ imm 223 arch/arm/net/bpf_jit_32.h (ARM_INST_MOVW | ((imm) >> 12) << 16 | (rd) << 12 | ((imm) & 0x0fff)) imm 225 arch/arm/net/bpf_jit_32.h #define ARM_MOVT(rd, imm) \ imm 226 arch/arm/net/bpf_jit_32.h (ARM_INST_MOVT | ((imm) >> 12) << 16 | (rd) << 12 | ((imm) & 0x0fff)) imm 234 arch/arm/net/bpf_jit_32.h #define ARM_ORR_I(rd, rn, imm) _AL3_I(ARM_INST_ORR, rd, rn, imm) imm 248 arch/arm/net/bpf_jit_32.h #define ARM_RSB_I(rd, rn, imm) _AL3_I(ARM_INST_RSB, rd, rn, imm) imm 249 arch/arm/net/bpf_jit_32.h #define ARM_RSBS_I(rd, rn, imm) _AL3_I(ARM_INST_RSBS, rd, rn, imm) imm 250 arch/arm/net/bpf_jit_32.h #define ARM_RSC_I(rd, rn, imm) _AL3_I(ARM_INST_RSC, rd, rn, imm) imm 257 arch/arm/net/bpf_jit_32.h #define ARM_SUB_I(rd, rn, imm) _AL3_I(ARM_INST_SUB, rd, rn, imm) imm 258 arch/arm/net/bpf_jit_32.h #define ARM_SUBS_I(rd, rn, imm) _AL3_I(ARM_INST_SUBS, rd, rn, imm) imm 259 arch/arm/net/bpf_jit_32.h #define ARM_SBC_I(rd, rn, imm) _AL3_I(ARM_INST_SBC, rd, rn, imm) imm 262 arch/arm/net/bpf_jit_32.h #define ARM_TST_I(rn, imm) _AL3_I(ARM_INST_TST, 0, rn, imm) imm 357 arch/arm/probes/kprobes/actions-thumb.c long imm = insn & 0x7f; imm 359 arch/arm/probes/kprobes/actions-thumb.c regs->ARM_sp -= imm * 4; imm 361 arch/arm/probes/kprobes/actions-thumb.c regs->ARM_sp += imm * 4; imm 34 arch/arm/probes/kprobes/checkers-common.c int imm = insn & 0xff; imm 35 arch/arm/probes/kprobes/checkers-common.c asi->stack_space = imm; imm 47 arch/arm/probes/kprobes/checkers-common.c int imm = insn & 0xff; imm 48 arch/arm/probes/kprobes/checkers-common.c asi->stack_space = imm << 2; imm 56 arch/arm/probes/kprobes/checkers-common.c int imm = ((insn & 0xf00) >> 4) + (insn & 0xf); imm 57 arch/arm/probes/kprobes/checkers-common.c asi->stack_space = imm; imm 66 arch/arm/probes/kprobes/checkers-common.c int imm = insn & 0xfff; imm 67 arch/arm/probes/kprobes/checkers-common.c asi->stack_space = imm; imm 90 arch/arm64/include/asm/debug-monitors.h u16 imm; imm 145 arch/arm64/include/asm/fpsimdmacros.h .macro _sve_rdvl nx, imm imm 362 arch/arm64/include/asm/insn.h u32 insn, u64 imm); imm 402 arch/arm64/include/asm/insn.h int imm, enum aarch64_insn_variant variant, imm 413 arch/arm64/include/asm/insn.h int imm, int shift, imm 447 arch/arm64/include/asm/insn.h u64 imm); imm 315 arch/arm64/kernel/debug-monitors.c if ((comment & ~hook->mask) == hook->imm) imm 318 arch/arm64/kernel/insn.c u32 insn, u64 imm) imm 329 arch/arm64/kernel/insn.c immlo = (imm & ADR_IMM_LOMASK) << ADR_IMM_LOSHIFT; imm 330 arch/arm64/kernel/insn.c imm >>= ADR_IMM_HILOSPLIT; imm 331 arch/arm64/kernel/insn.c immhi = (imm & ADR_IMM_HIMASK) << ADR_IMM_HISHIFT; imm 332 arch/arm64/kernel/insn.c imm = immlo | immhi; imm 346 arch/arm64/kernel/insn.c insn |= (imm & mask) << shift; imm 838 arch/arm64/kernel/insn.c int imm, enum aarch64_insn_variant variant, imm 873 arch/arm64/kernel/insn.c if (imm & ~(BIT(24) - 1)) imm 877 arch/arm64/kernel/insn.c if (imm & ~(SZ_4K - 1)) { imm 879 arch/arm64/kernel/insn.c if (imm & (SZ_4K - 1)) imm 882 arch/arm64/kernel/insn.c imm >>= 12; imm 890 arch/arm64/kernel/insn.c return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm); imm 893 arch/arm64/kernel/insn.c pr_err("%s: invalid immediate encoding %d\n", __func__, imm); imm 953 arch/arm64/kernel/insn.c int imm, int shift, 
imm 974 arch/arm64/kernel/insn.c if (imm & ~(SZ_64K - 1)) { imm 975 arch/arm64/kernel/insn.c pr_err("%s: invalid immediate encoding %d\n", __func__, imm); imm 1004 arch/arm64/kernel/insn.c return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm); imm 1307 arch/arm64/kernel/insn.c s32 imm; imm 1310 arch/arm64/kernel/insn.c imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_26, insn); imm 1311 arch/arm64/kernel/insn.c return (imm << 6) >> 4; imm 1316 arch/arm64/kernel/insn.c imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_19, insn); imm 1317 arch/arm64/kernel/insn.c return (imm << 13) >> 11; imm 1321 arch/arm64/kernel/insn.c imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_14, insn); imm 1322 arch/arm64/kernel/insn.c return (imm << 18) >> 16; imm 1506 arch/arm64/kernel/insn.c static u32 aarch64_encode_immediate(u64 imm, imm 1514 arch/arm64/kernel/insn.c if (!imm || !~imm) imm 1519 arch/arm64/kernel/insn.c if (upper_32_bits(imm)) imm 1539 arch/arm64/kernel/insn.c if ((imm & emask) != ((imm >> tmp) & emask)) imm 1550 arch/arm64/kernel/insn.c imm &= mask; imm 1553 arch/arm64/kernel/insn.c ones = hweight64(imm); imm 1564 arch/arm64/kernel/insn.c if (range_of_ones(imm)) { imm 1570 arch/arm64/kernel/insn.c ror = __ffs64(imm); imm 1579 arch/arm64/kernel/insn.c imm |= ~mask; imm 1580 arch/arm64/kernel/insn.c if (!range_of_ones(~imm)) imm 1587 arch/arm64/kernel/insn.c ror = fls(~imm); imm 1606 arch/arm64/kernel/insn.c u64 imm) imm 1630 arch/arm64/kernel/insn.c return aarch64_encode_immediate(imm, variant, insn); imm 262 arch/arm64/kernel/kgdb.c .imm = KGDB_DYN_DBG_BRK_IMM, imm 267 arch/arm64/kernel/kgdb.c .imm = KGDB_COMPILED_DBG_BRK_IMM, imm 155 arch/arm64/kernel/module.c u64 imm; imm 160 arch/arm64/kernel/module.c imm = sval >> lsb; imm 179 arch/arm64/kernel/module.c imm = ~imm; imm 184 arch/arm64/kernel/module.c insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm); imm 187 arch/arm64/kernel/module.c if (imm > U16_MAX) imm 196 arch/arm64/kernel/module.c u64 imm, imm_mask; imm 206 arch/arm64/kernel/module.c imm = sval & imm_mask; imm 209 arch/arm64/kernel/module.c insn = aarch64_insn_encode_immediate(imm_type, insn, imm); imm 438 arch/arm64/kernel/probes/kprobes.c .imm = KPROBES_BRK_IMM, imm 89 arch/arm64/kernel/probes/simulate-insn.c long imm, xn, val; imm 92 arch/arm64/kernel/probes/simulate-insn.c imm = ((opcode >> 3) & 0x1ffffc) | ((opcode >> 29) & 0x3); imm 93 arch/arm64/kernel/probes/simulate-insn.c imm = sign_extend64(imm, 20); imm 95 arch/arm64/kernel/probes/simulate-insn.c val = (imm<<12) + (addr & 0xfffffffffffff000); imm 97 arch/arm64/kernel/probes/simulate-insn.c val = imm + addr; imm 191 arch/arm64/kernel/probes/uprobes.c .imm = UPROBES_BRK_IMM, imm 978 arch/arm64/kernel/traps.c .imm = BUG_BRK_IMM, imm 1022 arch/arm64/kernel/traps.c .imm = KASAN_BRK_IMM, imm 30 arch/arm64/kvm/trace.h TP_PROTO(unsigned long vcpu_pc, unsigned long r0, unsigned long imm), imm 31 arch/arm64/kvm/trace.h TP_ARGS(vcpu_pc, r0, imm), imm 36 arch/arm64/kvm/trace.h __field(unsigned long, imm) imm 42 arch/arm64/kvm/trace.h __entry->imm = imm; imm 46 arch/arm64/kvm/trace.h __entry->vcpu_pc, __entry->r0, __entry->imm) imm 352 arch/arm64/net/bpf_jit_comp.c const s32 imm = insn->imm; imm 360 arch/arm64/net/bpf_jit_comp.c #define check_imm(bits, imm) do { \ imm 361 arch/arm64/net/bpf_jit_comp.c if ((((imm) > 0) && ((imm) >> (bits))) || \ imm 362 arch/arm64/net/bpf_jit_comp.c (((imm) < 0) && (~(imm) >> (bits)))) { \ imm 364 arch/arm64/net/bpf_jit_comp.c i, imm, imm); \ imm 368 
arch/arm64/net/bpf_jit_comp.c #define check_imm19(imm) check_imm(19, imm) imm 369 arch/arm64/net/bpf_jit_comp.c #define check_imm26(imm) check_imm(26, imm) imm 443 arch/arm64/net/bpf_jit_comp.c switch (imm) { imm 459 arch/arm64/net/bpf_jit_comp.c switch (imm) { imm 476 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, dst, imm, ctx); imm 481 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 486 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 491 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 496 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 501 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 506 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 511 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 516 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp2, imm, ctx); imm 522 arch/arm64/net/bpf_jit_comp.c emit(A64_LSL(is64, dst, dst, imm), ctx); imm 526 arch/arm64/net/bpf_jit_comp.c emit(A64_LSR(is64, dst, dst, imm), ctx); imm 530 arch/arm64/net/bpf_jit_comp.c emit(A64_ASR(is64, dst, dst, imm), ctx); imm 626 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 631 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, tmp, imm, ctx); imm 673 arch/arm64/net/bpf_jit_comp.c imm64 = (u64)insn1.imm << 32 | (u32)imm; imm 708 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(1, tmp, imm, ctx); imm 219 arch/csky/abiv1/alignment.c uint32_t imm = 0; imm 254 arch/csky/abiv1/alignment.c imm = (opcode >> 4) & 0xf; imm 263 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 1); imm 267 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 2); imm 271 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 1); imm 275 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 2); imm 294 arch/csky/abiv1/alignment.c __func__, opcode, rz, rx, imm, addr); imm 179 arch/csky/abiv1/inc/abi/entry.h .macro ANDI_R3 rx, imm imm 247 arch/csky/abiv2/inc/abi/entry.h .macro ANDI_R3 rx, imm imm 223 arch/ia64/kernel/patch.c u64 ip, mask, imm; imm 227 arch/ia64/kernel/patch.c imm = (((val >> 7) & 0x3f) << 27) | (val & 0x7f) << 13; imm 231 arch/ia64/kernel/patch.c ia64_patch(ip, mask, imm); imm 171 arch/ia64/kernel/unaligned.c unsigned long imm:7; /* [13:19] */ imm 694 arch/ia64/kernel/unaligned.c unsigned long imm; imm 702 arch/ia64/kernel/unaligned.c imm = ld.x << 7 | ld.imm; imm 707 arch/ia64/kernel/unaligned.c if (ld.m) imm |= SIGN_EXT9; imm 713 arch/ia64/kernel/unaligned.c ifa += imm; imm 717 arch/ia64/kernel/unaligned.c DPRINT("ld.x=%d ld.m=%d imm=%ld r3=0x%lx\n", ld.x, ld.m, imm, ifa); imm 739 arch/ia64/kernel/unaligned.c getreg(ld.imm, &r2, &nat_r2, regs); imm 748 arch/ia64/kernel/unaligned.c DPRINT("imm=%d r2=%ld r3=0x%lx nat_r2=%d\n",ld.imm, r2, ifa, nat_r2); imm 886 arch/ia64/kernel/unaligned.c getreg(ld.imm, &r2, NULL, regs); imm 915 arch/ia64/kernel/unaligned.c unsigned long imm; imm 920 arch/ia64/kernel/unaligned.c imm = ld.x << 7 | ld.r1; imm 924 arch/ia64/kernel/unaligned.c if (ld.m) imm |= SIGN_EXT9; imm 928 arch/ia64/kernel/unaligned.c ifa += imm; imm 930 arch/ia64/kernel/unaligned.c DPRINT("imm=%lx r3=%lx\n", imm, ifa); imm 1057 arch/ia64/kernel/unaligned.c DPRINT("ld.r1=%d ld.imm=%d x6_sz=%d\n", ld.r1, ld.imm, ld.x6_sz); imm 1090 arch/ia64/kernel/unaligned.c setfpreg(ld.imm, &fpr_final[1], regs); imm 1122 arch/ia64/kernel/unaligned.c invala_fr(ld.imm); imm 1223 arch/ia64/kernel/unaligned.c 
getfpreg(ld.imm, &fpr_init, regs); imm 1258 arch/ia64/kernel/unaligned.c unsigned long imm; imm 1263 arch/ia64/kernel/unaligned.c imm = ld.x << 7 | ld.r1; imm 1268 arch/ia64/kernel/unaligned.c imm |= SIGN_EXT9; imm 1272 arch/ia64/kernel/unaligned.c ifa += imm; imm 1274 arch/ia64/kernel/unaligned.c DPRINT("imm=%lx r3=%lx\n", imm, ifa); imm 1391 arch/ia64/kernel/unaligned.c "ld.x6=0x%x ld.m=%d ld.op=%d\n", opcode, u.insn.qp, u.insn.r1, u.insn.imm, imm 103 arch/microblaze/kernel/ftrace.c static unsigned int imm; /* saving whole imm instruction */ imm 135 arch/microblaze/kernel/ftrace.c imm = *(unsigned int *)rec->ip; imm 136 arch/microblaze/kernel/ftrace.c pr_debug("%s: imm:0x%x\n", __func__, imm); imm 157 arch/microblaze/kernel/ftrace.c __func__, (unsigned int)addr, (unsigned int)rec->ip, imm); imm 158 arch/microblaze/kernel/ftrace.c ret = ftrace_modify_code(rec->ip, imm); imm 961 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 4, imm 986 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 5, imm 1015 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 8, imm 1023 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 5, imm 1030 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 8, imm 1038 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 5, imm 1045 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int imm : 8, imm 221 arch/mips/kernel/process.c *poff = ip->mm16_r5_format.imm; imm 228 arch/mips/kernel/process.c *poff = ip->mm16_m_format.imm; imm 345 arch/mips/kernel/process.c tmp = ip->mm16_r5_format.imm >> 1; imm 2069 arch/mips/kernel/unaligned.c switch (mips16inst.ri.imm >> 5) { imm 2088 arch/mips/kernel/unaligned.c switch (mips16inst.ri.imm >> 5) { imm 411 arch/mips/net/ebpf_jit.c if (insn->imm >= S16_MIN && insn->imm <= S16_MAX) { imm 412 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, reg, MIPS_R_ZERO, insn->imm); imm 414 arch/mips/net/ebpf_jit.c int lower = (s16)(insn->imm & 0xffff); imm 415 arch/mips/net/ebpf_jit.c int upper = insn->imm - lower; imm 473 arch/mips/net/ebpf_jit.c if (insn->imm >= lower_bound && insn->imm <= upper_bound) { imm 477 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, insn->imm); imm 481 arch/mips/net/ebpf_jit.c emit_instr(ctx, andi, dst, dst, insn->imm); imm 485 arch/mips/net/ebpf_jit.c emit_instr(ctx, ori, dst, dst, insn->imm); imm 489 arch/mips/net/ebpf_jit.c emit_instr(ctx, xori, dst, dst, insn->imm); imm 492 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, dst, insn->imm); imm 495 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, dst, -insn->imm); imm 498 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsrl_safe, dst, dst, insn->imm & 0x3f); imm 501 arch/mips/net/ebpf_jit.c emit_instr(ctx, srl, dst, dst, insn->imm & 0x1f); imm 504 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsll_safe, dst, dst, insn->imm & 0x3f); imm 507 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, insn->imm & 0x1f); imm 510 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsra_safe, dst, dst, insn->imm & 0x3f); imm 513 arch/mips/net/ebpf_jit.c emit_instr(ctx, sra, dst, dst, insn->imm & 0x1f); imm 516 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, dst, MIPS_R_ZERO, insn->imm); imm 519 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, dst, dst, insn->imm); imm 522 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, dst, dst, -insn->imm); imm 702 arch/mips/net/ebpf_jit.c if (insn->imm == 1) /* Mult by 1 is a nop */ imm 729 arch/mips/net/ebpf_jit.c if 
(insn->imm == 1) /* Mult by 1 is a nop */ imm 752 arch/mips/net/ebpf_jit.c if (insn->imm == 0) imm 761 arch/mips/net/ebpf_jit.c if (insn->imm == 1) { imm 783 arch/mips/net/ebpf_jit.c if (insn->imm == 0) imm 790 arch/mips/net/ebpf_jit.c if (insn->imm == 1) { imm 1007 arch/mips/net/ebpf_jit.c if (insn->imm == 0) { imm 1160 arch/mips/net/ebpf_jit.c if (insn->imm == 0) { imm 1207 arch/mips/net/ebpf_jit.c t64s = insn->imm + 1; imm 1209 arch/mips/net/ebpf_jit.c t64s = insn->imm + 1; imm 1211 arch/mips/net/ebpf_jit.c t64s = insn->imm; imm 1239 arch/mips/net/ebpf_jit.c t64s = (u64)(u32)(insn->imm) + 1; imm 1241 arch/mips/net/ebpf_jit.c t64s = (u64)(u32)(insn->imm) + 1; imm 1243 arch/mips/net/ebpf_jit.c t64s = (u64)(u32)(insn->imm); imm 1258 arch/mips/net/ebpf_jit.c if (ctx->use_bbit_insns && hweight32((u32)insn->imm) == 1) { imm 1263 arch/mips/net/ebpf_jit.c emit_instr(ctx, bbit0, dst, ffs((u32)insn->imm) - 1, b_off); imm 1270 arch/mips/net/ebpf_jit.c emit_instr(ctx, bbit1, dst, ffs((u32)insn->imm) - 1, b_off); imm 1274 arch/mips/net/ebpf_jit.c t64 = (u32)insn->imm; imm 1304 arch/mips/net/ebpf_jit.c t64 = ((u64)(u32)insn->imm) | ((u64)(insn + 1)->imm << 32); imm 1310 arch/mips/net/ebpf_jit.c t64s = (s64)insn->imm + (long)__bpf_call_base; imm 1328 arch/mips/net/ebpf_jit.c if (insn->imm == 64 && td == REG_32BIT) imm 1331 arch/mips/net/ebpf_jit.c if (insn->imm != 64 && td == REG_64BIT) { imm 1341 arch/mips/net/ebpf_jit.c if (insn->imm == 16) { imm 1345 arch/mips/net/ebpf_jit.c } else if (insn->imm == 32) { imm 1593 arch/mips/net/ebpf_jit.c if (insn->imm >= 0) imm 1600 arch/mips/net/ebpf_jit.c if (insn->imm == 64) imm 1602 arch/mips/net/ebpf_jit.c else if (insn->imm == 32) imm 1618 arch/mips/net/ebpf_jit.c if (insn->imm >= 0) imm 1635 arch/mips/net/ebpf_jit.c val = (s64)((u32)insn->imm | ((u64)(insn + 1)->imm << 32)); imm 1655 arch/mips/net/ebpf_jit.c insn->imm >= 0 ? 
REG_32BIT_POS : REG_32BIT); imm 107 arch/nds32/kernel/ftrace.c unsigned long imm = addr >> 12; imm 110 arch/nds32/kernel/ftrace.c return ENDIAN_CONVERT(opcode | rt_num | imm); imm 116 arch/nds32/kernel/ftrace.c unsigned long imm = addr & 0x0000fff; imm 120 arch/nds32/kernel/ftrace.c return ENDIAN_CONVERT(opcode | rt_num | ra_num | imm); imm 198 arch/nds32/mm/alignment.c int imm, regular, load, len, addr_mode, idx_mode; imm 204 arch/nds32/mm/alignment.c imm = 1; imm 212 arch/nds32/mm/alignment.c imm = 1; imm 220 arch/nds32/mm/alignment.c imm = 1; imm 228 arch/nds32/mm/alignment.c imm = 0; imm 236 arch/nds32/mm/alignment.c imm = 1; imm 244 arch/nds32/mm/alignment.c imm = 1; imm 252 arch/nds32/mm/alignment.c imm = 1; imm 260 arch/nds32/mm/alignment.c imm = 0; imm 285 arch/nds32/mm/alignment.c if (imm) imm 315 arch/nds32/mm/alignment.c int imm, regular, load, len, sign_ext; imm 323 arch/nds32/mm/alignment.c imm = 1; imm 330 arch/nds32/mm/alignment.c imm = 1; imm 337 arch/nds32/mm/alignment.c imm = 1; imm 344 arch/nds32/mm/alignment.c imm = 1; imm 351 arch/nds32/mm/alignment.c imm = 1; imm 358 arch/nds32/mm/alignment.c imm = 1; imm 365 arch/nds32/mm/alignment.c imm = 1; imm 372 arch/nds32/mm/alignment.c imm = 1; imm 379 arch/nds32/mm/alignment.c imm = 1; imm 386 arch/nds32/mm/alignment.c imm = 1; imm 397 arch/nds32/mm/alignment.c imm = 0; imm 404 arch/nds32/mm/alignment.c imm = 0; imm 411 arch/nds32/mm/alignment.c imm = 0; imm 418 arch/nds32/mm/alignment.c imm = 0; imm 425 arch/nds32/mm/alignment.c imm = 0; imm 432 arch/nds32/mm/alignment.c imm = 0; imm 439 arch/nds32/mm/alignment.c imm = 0; imm 446 arch/nds32/mm/alignment.c imm = 0; imm 453 arch/nds32/mm/alignment.c imm = 0; imm 460 arch/nds32/mm/alignment.c imm = 0; imm 472 arch/nds32/mm/alignment.c if (imm) imm 355 arch/openrisc/kernel/traps.c long imm; imm 364 arch/openrisc/kernel/traps.c imm = (short)insn; imm 365 arch/openrisc/kernel/traps.c lwa_addr = (unsigned long __user *)(regs->gpr[ra] + imm); imm 397 arch/openrisc/kernel/traps.c long imm; imm 406 arch/openrisc/kernel/traps.c imm = (short)(((insn & 0x2200000) >> 10) | (insn & 0x7ff)); imm 407 arch/openrisc/kernel/traps.c vaddr = (unsigned long __user *)(regs->gpr[ra] + imm); imm 316 arch/powerpc/lib/code-patching.c signed long imm; imm 318 arch/powerpc/lib/code-patching.c imm = *instr & 0x3FFFFFC; imm 321 arch/powerpc/lib/code-patching.c if (imm & 0x2000000) imm 322 arch/powerpc/lib/code-patching.c imm -= 0x4000000; imm 325 arch/powerpc/lib/code-patching.c imm += (unsigned long)instr; imm 327 arch/powerpc/lib/code-patching.c return (unsigned long)imm; imm 332 arch/powerpc/lib/code-patching.c signed long imm; imm 334 arch/powerpc/lib/code-patching.c imm = *instr & 0xFFFC; imm 337 arch/powerpc/lib/code-patching.c if (imm & 0x8000) imm 338 arch/powerpc/lib/code-patching.c imm -= 0x10000; imm 341 arch/powerpc/lib/code-patching.c imm += (unsigned long)instr; imm 343 arch/powerpc/lib/code-patching.c return (unsigned long)imm; imm 1169 arch/powerpc/lib/sstep.c unsigned long int imm; imm 1180 arch/powerpc/lib/sstep.c imm = (signed short)(instr & 0xfffc); imm 1182 arch/powerpc/lib/sstep.c imm += regs->nip; imm 1183 arch/powerpc/lib/sstep.c op->val = truncate_if_32bit(regs->msr, imm); imm 1199 arch/powerpc/lib/sstep.c imm = instr & 0x03fffffc; imm 1200 arch/powerpc/lib/sstep.c if (imm & 0x02000000) imm 1201 arch/powerpc/lib/sstep.c imm -= 0x04000000; imm 1203 arch/powerpc/lib/sstep.c imm += regs->nip; imm 1204 arch/powerpc/lib/sstep.c op->val = truncate_if_32bit(regs->msr, imm); imm 1223 
arch/powerpc/lib/sstep.c imm = (instr & 0x400)? regs->ctr: regs->link; imm 1224 arch/powerpc/lib/sstep.c op->val = truncate_if_32bit(regs->msr, imm); imm 1342 arch/powerpc/lib/sstep.c imm = (short) instr; imm 1343 arch/powerpc/lib/sstep.c add_with_carry(regs, op, rd, ~regs->gpr[ra], imm, 1); imm 1347 arch/powerpc/lib/sstep.c imm = (unsigned short) instr; imm 1353 arch/powerpc/lib/sstep.c do_cmp_unsigned(regs, op, val, imm, rd >> 2); imm 1357 arch/powerpc/lib/sstep.c imm = (short) instr; imm 1363 arch/powerpc/lib/sstep.c do_cmp_signed(regs, op, val, imm, rd >> 2); imm 1367 arch/powerpc/lib/sstep.c imm = (short) instr; imm 1368 arch/powerpc/lib/sstep.c add_with_carry(regs, op, rd, regs->gpr[ra], imm, 0); imm 1372 arch/powerpc/lib/sstep.c imm = (short) instr; imm 1373 arch/powerpc/lib/sstep.c add_with_carry(regs, op, rd, regs->gpr[ra], imm, 0); imm 1378 arch/powerpc/lib/sstep.c imm = (short) instr; imm 1380 arch/powerpc/lib/sstep.c imm += regs->gpr[ra]; imm 1381 arch/powerpc/lib/sstep.c op->val = imm; imm 1385 arch/powerpc/lib/sstep.c imm = ((short) instr) << 16; imm 1387 arch/powerpc/lib/sstep.c imm += regs->gpr[ra]; imm 1388 arch/powerpc/lib/sstep.c op->val = imm; imm 1394 arch/powerpc/lib/sstep.c imm = (short) (instr & 0xffc1); /* d0 + d2 fields */ imm 1395 arch/powerpc/lib/sstep.c imm |= (instr >> 15) & 0x3e; /* d1 field */ imm 1396 arch/powerpc/lib/sstep.c op->val = regs->nip + (imm << 16) + 4; imm 1406 arch/powerpc/lib/sstep.c imm = MASK32(mb, me); imm 1407 arch/powerpc/lib/sstep.c op->val = (regs->gpr[ra] & ~imm) | (ROTATE(val, rb) & imm); imm 1430 arch/powerpc/lib/sstep.c imm = (unsigned short) instr; imm 1431 arch/powerpc/lib/sstep.c op->val = regs->gpr[rd] | (imm << 16); imm 1439 arch/powerpc/lib/sstep.c imm = (unsigned short) instr; imm 1440 arch/powerpc/lib/sstep.c op->val = regs->gpr[rd] ^ (imm << 16); imm 1449 arch/powerpc/lib/sstep.c imm = (unsigned short) instr; imm 1450 arch/powerpc/lib/sstep.c op->val = regs->gpr[rd] & (imm << 16); imm 1472 arch/powerpc/lib/sstep.c imm = MASK64(mb, 63 - sh); imm 1473 arch/powerpc/lib/sstep.c val = (regs->gpr[ra] & ~imm) | imm 1474 arch/powerpc/lib/sstep.c (val & imm); imm 1539 arch/powerpc/lib/sstep.c imm = (instr & 0x10000)? 
0x8002: 0xefffffffffffeffeUL; imm 1540 arch/powerpc/lib/sstep.c op->val = imm; imm 1545 arch/powerpc/lib/sstep.c imm = 0xffffffffUL; imm 1547 arch/powerpc/lib/sstep.c imm = 0xf0000000UL; imm 1551 arch/powerpc/lib/sstep.c imm >>= 4; imm 1554 arch/powerpc/lib/sstep.c op->val = regs->ccr & imm; imm 1559 arch/powerpc/lib/sstep.c imm = 0xf0000000UL; imm 1564 arch/powerpc/lib/sstep.c op->ccval = (op->ccval & ~imm) | imm 1565 arch/powerpc/lib/sstep.c (val & imm); imm 1566 arch/powerpc/lib/sstep.c imm >>= 4; imm 308 arch/powerpc/net/bpf_jit_comp64.c s32 imm = insn[i].imm; imm 354 arch/powerpc/net/bpf_jit_comp64.c imm = -imm; imm 355 arch/powerpc/net/bpf_jit_comp64.c if (imm) { imm 356 arch/powerpc/net/bpf_jit_comp64.c if (imm >= -32768 && imm < 32768) imm 357 arch/powerpc/net/bpf_jit_comp64.c PPC_ADDI(dst_reg, dst_reg, IMM_L(imm)); imm 359 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 373 arch/powerpc/net/bpf_jit_comp64.c if (imm >= -32768 && imm < 32768) imm 374 arch/powerpc/net/bpf_jit_comp64.c PPC_MULI(dst_reg, dst_reg, IMM_L(imm)); imm 376 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 409 arch/powerpc/net/bpf_jit_comp64.c if (imm == 0) imm 411 arch/powerpc/net/bpf_jit_comp64.c else if (imm == 1) imm 414 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 458 arch/powerpc/net/bpf_jit_comp64.c if (!IMM_H(imm)) imm 459 arch/powerpc/net/bpf_jit_comp64.c PPC_ANDI(dst_reg, dst_reg, IMM_L(imm)); imm 462 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 472 arch/powerpc/net/bpf_jit_comp64.c if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) { imm 474 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 477 arch/powerpc/net/bpf_jit_comp64.c if (IMM_L(imm)) imm 478 arch/powerpc/net/bpf_jit_comp64.c PPC_ORI(dst_reg, dst_reg, IMM_L(imm)); imm 479 arch/powerpc/net/bpf_jit_comp64.c if (IMM_H(imm)) imm 480 arch/powerpc/net/bpf_jit_comp64.c PPC_ORIS(dst_reg, dst_reg, IMM_H(imm)); imm 489 arch/powerpc/net/bpf_jit_comp64.c if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) { imm 491 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 494 arch/powerpc/net/bpf_jit_comp64.c if (IMM_L(imm)) imm 495 arch/powerpc/net/bpf_jit_comp64.c PPC_XORI(dst_reg, dst_reg, IMM_L(imm)); imm 496 arch/powerpc/net/bpf_jit_comp64.c if (IMM_H(imm)) imm 497 arch/powerpc/net/bpf_jit_comp64.c PPC_XORIS(dst_reg, dst_reg, IMM_H(imm)); imm 512 arch/powerpc/net/bpf_jit_comp64.c PPC_SLWI(dst_reg, dst_reg, imm); imm 517 arch/powerpc/net/bpf_jit_comp64.c if (imm != 0) imm 518 arch/powerpc/net/bpf_jit_comp64.c PPC_SLDI(dst_reg, dst_reg, imm); imm 529 arch/powerpc/net/bpf_jit_comp64.c PPC_SRWI(dst_reg, dst_reg, imm); imm 534 arch/powerpc/net/bpf_jit_comp64.c if (imm != 0) imm 535 arch/powerpc/net/bpf_jit_comp64.c PPC_SRDI(dst_reg, dst_reg, imm); imm 544 arch/powerpc/net/bpf_jit_comp64.c PPC_SRAWI(dst_reg, dst_reg, imm); imm 547 arch/powerpc/net/bpf_jit_comp64.c if (imm != 0) imm 548 arch/powerpc/net/bpf_jit_comp64.c PPC_SRADI(dst_reg, dst_reg, imm); imm 556 arch/powerpc/net/bpf_jit_comp64.c if (imm == 1) { imm 565 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(dst_reg, imm); imm 566 arch/powerpc/net/bpf_jit_comp64.c if (imm < 0) imm 590 arch/powerpc/net/bpf_jit_comp64.c switch (imm) { imm 629 arch/powerpc/net/bpf_jit_comp64.c switch (imm) { imm 653 arch/powerpc/net/bpf_jit_comp64.c PPC_LI(b2p[TMP_REG_1], imm); imm 661 arch/powerpc/net/bpf_jit_comp64.c PPC_LI(b2p[TMP_REG_1], imm); imm 669 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], 
imm); imm 677 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 741 arch/powerpc/net/bpf_jit_comp64.c imm64 = ((u64)(u32) insn[i].imm) | imm 742 arch/powerpc/net/bpf_jit_comp64.c (((u64)(u32) insn[i+1].imm) << 32); imm 913 arch/powerpc/net/bpf_jit_comp64.c if (imm >= 0 && imm < 32768) { imm 915 arch/powerpc/net/bpf_jit_comp64.c PPC_CMPLWI(dst_reg, imm); imm 917 arch/powerpc/net/bpf_jit_comp64.c PPC_CMPLDI(dst_reg, imm); imm 920 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 946 arch/powerpc/net/bpf_jit_comp64.c if (imm >= -32768 && imm < 32768) { imm 948 arch/powerpc/net/bpf_jit_comp64.c PPC_CMPWI(dst_reg, imm); imm 950 arch/powerpc/net/bpf_jit_comp64.c PPC_CMPDI(dst_reg, imm); imm 952 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(b2p[TMP_REG_1], imm); imm 965 arch/powerpc/net/bpf_jit_comp64.c if (imm >= 0 && imm < 32768) imm 967 arch/powerpc/net/bpf_jit_comp64.c PPC_ANDI(b2p[TMP_REG_1], dst_reg, imm); imm 971 arch/powerpc/net/bpf_jit_comp64.c PPC_LI32(tmp_reg, imm); imm 203 arch/riscv/net/bpf_jit_comp.c u32 imm; imm 205 arch/riscv/net/bpf_jit_comp.c imm = (imm20_1 & 0x80000) | ((imm20_1 & 0x3ff) << 9) | imm 208 arch/riscv/net/bpf_jit_comp.c return (imm << 12) | (rd << 7) | opcode; imm 744 arch/riscv/net/bpf_jit_comp.c s32 imm = insn->imm; imm 752 arch/riscv/net/bpf_jit_comp.c if (imm == 1) { imm 842 arch/riscv/net/bpf_jit_comp.c int shift = 64 - imm; imm 855 arch/riscv/net/bpf_jit_comp.c if (imm == 16) imm 867 arch/riscv/net/bpf_jit_comp.c if (imm == 32) imm 899 arch/riscv/net/bpf_jit_comp.c emit_imm(rd, imm, ctx); imm 907 arch/riscv/net/bpf_jit_comp.c if (is_12b_int(imm)) { imm 908 arch/riscv/net/bpf_jit_comp.c emit(is64 ? rv_addi(rd, rd, imm) : imm 909 arch/riscv/net/bpf_jit_comp.c rv_addiw(rd, rd, imm), ctx); imm 911 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 920 arch/riscv/net/bpf_jit_comp.c if (is_12b_int(-imm)) { imm 921 arch/riscv/net/bpf_jit_comp.c emit(is64 ? rv_addi(rd, rd, -imm) : imm 922 arch/riscv/net/bpf_jit_comp.c rv_addiw(rd, rd, -imm), ctx); imm 924 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 933 arch/riscv/net/bpf_jit_comp.c if (is_12b_int(imm)) { imm 934 arch/riscv/net/bpf_jit_comp.c emit(rv_andi(rd, rd, imm), ctx); imm 936 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 944 arch/riscv/net/bpf_jit_comp.c if (is_12b_int(imm)) { imm 945 arch/riscv/net/bpf_jit_comp.c emit(rv_ori(rd, rd, imm), ctx); imm 947 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 955 arch/riscv/net/bpf_jit_comp.c if (is_12b_int(imm)) { imm 956 arch/riscv/net/bpf_jit_comp.c emit(rv_xori(rd, rd, imm), ctx); imm 958 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 966 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 974 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 982 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 990 arch/riscv/net/bpf_jit_comp.c emit(is64 ? rv_slli(rd, rd, imm) : rv_slliw(rd, rd, imm), ctx); imm 996 arch/riscv/net/bpf_jit_comp.c emit(is64 ? rv_srli(rd, rd, imm) : rv_srliw(rd, rd, imm), ctx); imm 1002 arch/riscv/net/bpf_jit_comp.c emit(is64 ? 
rv_srai(rd, rd, imm) : rv_sraiw(rd, rd, imm), ctx); imm 1115 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1124 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1133 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1142 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1151 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1160 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1169 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1178 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1187 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1196 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1205 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1263 arch/riscv/net/bpf_jit_comp.c imm64 = (u64)insn1.imm << 32 | (u32)imm; imm 1318 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1330 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1341 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 1352 arch/riscv/net/bpf_jit_comp.c emit_imm(RV_REG_T1, imm, ctx); imm 182 arch/s390/net/bpf_jit_comp.c #define EMIT4_IMM(op, b1, imm) \ imm 184 arch/s390/net/bpf_jit_comp.c unsigned int __imm = (imm) & 0xffff; \ imm 236 arch/s390/net/bpf_jit_comp.c #define EMIT6_PCREL_IMM_LABEL(op1, op2, b1, imm, label, mask) \ imm 240 arch/s390/net/bpf_jit_comp.c (rel & 0xffff), op2 | (imm & 0xff) << 8); \ imm 242 arch/s390/net/bpf_jit_comp.c BUILD_BUG_ON(((unsigned long) imm) > 0xff); \ imm 267 arch/s390/net/bpf_jit_comp.c #define _EMIT6_IMM(op, imm) \ imm 269 arch/s390/net/bpf_jit_comp.c unsigned int __imm = (imm); \ imm 273 arch/s390/net/bpf_jit_comp.c #define EMIT6_IMM(op, b1, imm) \ imm 275 arch/s390/net/bpf_jit_comp.c _EMIT6_IMM(op | reg_high(b1) << 16, imm); \ imm 514 arch/s390/net/bpf_jit_comp.c s32 imm = insn->imm; imm 536 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc00f0000, dst_reg, imm); imm 542 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc0010000, dst_reg, imm); imm 552 arch/s390/net/bpf_jit_comp.c imm64 = (u64)(u32) insn[0].imm | ((u64)(u32) insn[1].imm) << 32; imm 572 arch/s390/net/bpf_jit_comp.c if (!imm) imm 575 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc20b0000, dst_reg, imm); imm 579 arch/s390/net/bpf_jit_comp.c if (!imm) imm 582 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc2080000, dst_reg, imm); imm 597 arch/s390/net/bpf_jit_comp.c if (!imm) imm 600 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc20b0000, dst_reg, -imm); imm 604 arch/s390/net/bpf_jit_comp.c if (!imm) imm 607 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc2080000, dst_reg, -imm); imm 622 arch/s390/net/bpf_jit_comp.c if (imm == 1) imm 625 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc2010000, dst_reg, imm); imm 629 arch/s390/net/bpf_jit_comp.c if (imm == 1) imm 632 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc2000000, dst_reg, imm); imm 674 arch/s390/net/bpf_jit_comp.c if (imm == 1) { imm 686 arch/s390/net/bpf_jit_comp.c EMIT_CONST_U32(imm)); imm 698 arch/s390/net/bpf_jit_comp.c if (imm == 1) { imm 710 arch/s390/net/bpf_jit_comp.c EMIT_CONST_U64(imm)); imm 729 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc00b0000, dst_reg, imm); imm 735 arch/s390/net/bpf_jit_comp.c EMIT_CONST_U64(imm)); imm 751 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc00d0000, dst_reg, imm); imm 757 arch/s390/net/bpf_jit_comp.c EMIT_CONST_U64(imm)); imm 772 arch/s390/net/bpf_jit_comp.c if (!imm) imm 775 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc0070000, dst_reg, imm); imm 781 
arch/s390/net/bpf_jit_comp.c EMIT_CONST_U64(imm)); imm 796 arch/s390/net/bpf_jit_comp.c if (imm == 0) imm 799 arch/s390/net/bpf_jit_comp.c EMIT4_DISP(0x89000000, dst_reg, REG_0, imm); imm 803 arch/s390/net/bpf_jit_comp.c if (imm == 0) imm 806 arch/s390/net/bpf_jit_comp.c EMIT6_DISP_LH(0xeb000000, 0x000d, dst_reg, dst_reg, REG_0, imm); imm 821 arch/s390/net/bpf_jit_comp.c if (imm == 0) imm 824 arch/s390/net/bpf_jit_comp.c EMIT4_DISP(0x88000000, dst_reg, REG_0, imm); imm 828 arch/s390/net/bpf_jit_comp.c if (imm == 0) imm 831 arch/s390/net/bpf_jit_comp.c EMIT6_DISP_LH(0xeb000000, 0x000c, dst_reg, dst_reg, REG_0, imm); imm 846 arch/s390/net/bpf_jit_comp.c if (imm == 0) imm 849 arch/s390/net/bpf_jit_comp.c EMIT4_DISP(0x8a000000, dst_reg, REG_0, imm); imm 853 arch/s390/net/bpf_jit_comp.c if (imm == 0) imm 856 arch/s390/net/bpf_jit_comp.c EMIT6_DISP_LH(0xeb000000, 0x000a, dst_reg, dst_reg, REG_0, imm); imm 875 arch/s390/net/bpf_jit_comp.c switch (imm) { imm 892 arch/s390/net/bpf_jit_comp.c switch (imm) { imm 941 arch/s390/net/bpf_jit_comp.c EMIT4_IMM(0xa7080000, REG_W0, (u8) imm); imm 948 arch/s390/net/bpf_jit_comp.c EMIT4_IMM(0xa7080000, REG_W0, (u16) imm); imm 955 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc00f0000, REG_W0, (u32) imm); imm 962 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc0010000, REG_W0, imm); imm 1187 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc00f0000, REG_W1, imm); imm 1192 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc0010000, REG_W1, imm); imm 1251 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc0010000, REG_W1, imm); imm 1259 arch/s390/net/bpf_jit_comp.c EMIT6_IMM(0xc0010000, REG_W1, imm); imm 300 arch/sh/kernel/disassemble.c int imm = 0; imm 317 arch/sh/kernel/disassemble.c imm = (nibs[2] << 4) | (nibs[3]); imm 318 arch/sh/kernel/disassemble.c if (imm & 0x80) imm 319 arch/sh/kernel/disassemble.c imm |= ~0xff; imm 320 arch/sh/kernel/disassemble.c imm = ((char)imm) * 2 + 4 ; imm 323 arch/sh/kernel/disassemble.c imm = ((nibs[1]) << 8) | (nibs[2] << 4) | (nibs[3]); imm 324 arch/sh/kernel/disassemble.c if (imm & 0x800) imm 325 arch/sh/kernel/disassemble.c imm |= ~0xfff; imm 326 arch/sh/kernel/disassemble.c imm = imm * 2 + 4; imm 329 arch/sh/kernel/disassemble.c imm = nibs[3]; imm 332 arch/sh/kernel/disassemble.c imm = nibs[3] <<1; imm 335 arch/sh/kernel/disassemble.c imm = nibs[3] <<2; imm 338 arch/sh/kernel/disassemble.c imm = (nibs[2] << 4) | nibs[3]; imm 341 arch/sh/kernel/disassemble.c imm = ((nibs[2] << 4) | nibs[3]) <<1; imm 345 arch/sh/kernel/disassemble.c imm = ((nibs[2] << 4) | nibs[3]) <<2; imm 349 arch/sh/kernel/disassemble.c imm = ((nibs[2] << 4) | nibs[3]) <<1; imm 352 arch/sh/kernel/disassemble.c imm = ((nibs[2] << 4) | nibs[3]) <<2; imm 355 arch/sh/kernel/disassemble.c imm = (nibs[2] << 4) | (nibs[3]); imm 358 arch/sh/kernel/disassemble.c imm = nibs[3]; imm 387 arch/sh/kernel/disassemble.c printk("#%d", (char)(imm)); imm 405 arch/sh/kernel/disassemble.c printk("@(%d,r%d)", imm, rn); imm 420 arch/sh/kernel/disassemble.c printk("@(%d,r%d)", imm, rm); imm 427 arch/sh/kernel/disassemble.c disp_pc_addr = imm + 4 + (memaddr & relmask); imm 438 arch/sh/kernel/disassemble.c printk("@(%d,gbr)",imm); imm 445 arch/sh/kernel/disassemble.c printk("%08x", imm + memaddr); imm 88 arch/sparc/kernel/unaligned_32.c static inline int sign_extend_imm13(int imm) imm 90 arch/sparc/kernel/unaligned_32.c return imm << 19 >> 19; imm 115 arch/sparc/kernel/unaligned_64.c static inline long sign_extend_imm13(long imm) imm 117 arch/sparc/kernel/unaligned_64.c return imm << 51 >> 51; imm 298 
arch/sparc/net/bpf_jit_comp_64.c static void emit_alu_K(unsigned int opcode, unsigned int dst, unsigned int imm, imm 301 arch/sparc/net/bpf_jit_comp_64.c bool small_immed = is_simm13(imm); imm 306 arch/sparc/net/bpf_jit_comp_64.c emit(insn | IMMED | S13(imm), ctx); imm 312 arch/sparc/net/bpf_jit_comp_64.c emit_set_const_sext(imm, tmp, ctx); imm 317 arch/sparc/net/bpf_jit_comp_64.c static void emit_alu3_K(unsigned int opcode, unsigned int src, unsigned int imm, imm 320 arch/sparc/net/bpf_jit_comp_64.c bool small_immed = is_simm13(imm); imm 325 arch/sparc/net/bpf_jit_comp_64.c emit(insn | IMMED | S13(imm), ctx); imm 331 arch/sparc/net/bpf_jit_comp_64.c emit_set_const_sext(imm, tmp, ctx); imm 636 arch/sparc/net/bpf_jit_comp_64.c const u8 dst, s32 imm, struct jit_ctx *ctx) imm 640 arch/sparc/net/bpf_jit_comp_64.c emit(cb_opc | IMMED | WDISP10(off << 2) | RS1(dst) | S5(imm), ctx); imm 659 arch/sparc/net/bpf_jit_comp_64.c const s32 imm, bool is_imm, int branch_dst, imm 675 arch/sparc/net/bpf_jit_comp_64.c if (!is_simm5(imm)) imm 677 arch/sparc/net/bpf_jit_comp_64.c } else if (!is_simm13(imm)) { imm 682 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm_sext(imm, tmp, ctx); imm 693 arch/sparc/net/bpf_jit_comp_64.c emit_btsti(dst, imm, ctx); imm 698 arch/sparc/net/bpf_jit_comp_64.c emit_cmpi(dst, imm, ctx); imm 785 arch/sparc/net/bpf_jit_comp_64.c dst, imm, ctx); imm 902 arch/sparc/net/bpf_jit_comp_64.c const s32 imm = insn->imm; imm 1002 arch/sparc/net/bpf_jit_comp_64.c switch (imm) { imm 1026 arch/sparc/net/bpf_jit_comp_64.c switch (imm) { imm 1065 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm32(imm, dst, ctx); imm 1070 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm_sext(imm, dst, ctx); imm 1075 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(ADD, dst, imm, ctx); imm 1079 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SUB, dst, imm, ctx); imm 1083 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(AND, dst, imm, ctx); imm 1087 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(OR, dst, imm, ctx); imm 1091 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(XOR, dst, imm, ctx); imm 1094 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(MUL, dst, imm, ctx); imm 1097 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(MULX, dst, imm, ctx); imm 1100 arch/sparc/net/bpf_jit_comp_64.c if (imm == 0) imm 1104 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(DIV, dst, imm, ctx); imm 1107 arch/sparc/net/bpf_jit_comp_64.c if (imm == 0) imm 1110 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(UDIVX, dst, imm, ctx); imm 1117 arch/sparc/net/bpf_jit_comp_64.c if (imm == 0) imm 1126 arch/sparc/net/bpf_jit_comp_64.c if (is_simm13(imm)) { imm 1127 arch/sparc/net/bpf_jit_comp_64.c emit(div | IMMED | RS1(dst) | S13(imm) | RD(tmp), ctx); imm 1128 arch/sparc/net/bpf_jit_comp_64.c emit(MULX | IMMED | RS1(tmp) | S13(imm) | RD(tmp), ctx); imm 1135 arch/sparc/net/bpf_jit_comp_64.c emit_set_const_sext(imm, tmp1, ctx); imm 1143 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SLL, dst, imm, ctx); imm 1146 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SLLX, dst, imm, ctx); imm 1149 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRL, dst, imm, ctx); imm 1154 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRLX, dst, imm, ctx); imm 1157 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRA, dst, imm, ctx); imm 1160 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRAX, dst, imm, ctx); imm 1207 arch/sparc/net/bpf_jit_comp_64.c err = emit_compare_and_branch(code, dst, 0, imm, true, i + off, ctx); imm 1216 arch/sparc/net/bpf_jit_comp_64.c u8 *func = ((u8 *)__bpf_call_base) + imm; imm 1248 
arch/sparc/net/bpf_jit_comp_64.c imm64 = (u64)insn1.imm << 32 | (u32)imm; imm 1303 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm(imm, tmp2, ctx); imm 84 arch/x86/include/asm/inat.h #define INAT_MAKE_IMM(imm) (imm << INAT_IMM_OFFS) imm 409 arch/x86/net/bpf_jit_comp.c const s32 imm32 = insn->imm; imm 518 arch/x86/net/bpf_jit_comp.c emit_mov_imm64(&prog, dst_reg, insn[1].imm, insn[0].imm); imm 1476 arch/x86/net/bpf_jit_comp32.c const s32 imm32 = insn->imm; imm 1701 arch/x86/net/bpf_jit_comp32.c hi = insn[1].imm; imm 721 drivers/crypto/chelsio/chcr_algo.c unsigned int imm, imm 746 drivers/crypto/chelsio/chcr_algo.c chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(!imm); imm 783 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(wrparam->bytes, 16) : imm 837 drivers/crypto/chelsio/chcr_algo.c + (reqctx->imm ? (wrparam->bytes) : 0); imm 838 drivers/crypto/chelsio/chcr_algo.c create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0, imm 1133 drivers/crypto/chelsio/chcr_algo.c if (!reqctx->imm) { imm 1227 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + IV + req->nbytes) <= imm 1232 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = 0; imm 1235 drivers/crypto/chelsio/chcr_algo.c if (!reqctx->imm) { imm 1509 drivers/crypto/chelsio/chcr_algo.c req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len + imm 1514 drivers/crypto/chelsio/chcr_algo.c transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len + imm 1570 drivers/crypto/chelsio/chcr_algo.c temp = param->kctx_len + DUMMY_BYTES + (req_ctx->hctx_wr.imm ? imm 1573 drivers/crypto/chelsio/chcr_algo.c create_wreq(h_ctx(tfm), chcr_req, &req->base, req_ctx->hctx_wr.imm, imm 2366 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) < imm 2368 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) imm 2448 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0); imm 2449 drivers/crypto/chelsio/chcr_algo.c create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size, imm 2540 drivers/crypto/chelsio/chcr_algo.c if (reqctx->imm) { imm 2589 drivers/crypto/chelsio/chcr_algo.c if (reqctx->imm) { imm 2628 drivers/crypto/chelsio/chcr_algo.c if (reqctx->hctx_wr.imm) { imm 2925 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen + imm 2927 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen + imm 2966 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen + imm 2968 drivers/crypto/chelsio/chcr_algo.c create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0, imm 3016 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <= imm 3018 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) : imm 3082 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? 
(req->assoclen + req->cryptlen) : 0); imm 3083 drivers/crypto/chelsio/chcr_algo.c create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size, imm 188 drivers/crypto/chelsio/chcr_crypto.h u16 imm; imm 269 drivers/crypto/chelsio/chcr_crypto.h u8 imm; imm 297 drivers/crypto/chelsio/chcr_crypto.h u16 imm; imm 3194 drivers/infiniband/hw/mlx4/qp.c memcpy(&ctrl->imm, ah->av.eth.mac + 2, 4); imm 3612 drivers/infiniband/hw/mlx4/qp.c ctrl->imm = send_ieth(wr); imm 4909 drivers/infiniband/hw/mlx5/qp.c (*ctrl)->imm = send_ieth(wr); imm 5069 drivers/infiniband/hw/mlx5/qp.c ctrl->imm = cpu_to_be32(wr->ex.invalidate_rkey); imm 5076 drivers/infiniband/hw/mlx5/qp.c ctrl->imm = cpu_to_be32(reg_wr(wr)->key); imm 5100 drivers/infiniband/hw/mlx5/qp.c ctrl->imm = cpu_to_be32(reg_pi_wr.key); imm 5144 drivers/infiniband/hw/mlx5/qp.c ctrl->imm = cpu_to_be32(mr->ibmr.rkey); imm 5270 drivers/infiniband/hw/mlx5/qp.c ctrl->imm = cpu_to_be32(umr_wr(wr)->mkey); imm 1958 drivers/infiniband/hw/mthca/mthca_cmd.c u64 imm; imm 1961 drivers/infiniband/hw/mthca/mthca_cmd.c err = mthca_cmd_imm(dev, mailbox->dma, &imm, 0, 0, CMD_MGID_HASH, imm 1964 drivers/infiniband/hw/mthca/mthca_cmd.c *hash = imm; imm 1678 drivers/infiniband/hw/mthca/mthca_qp.c ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; imm 2008 drivers/infiniband/hw/mthca/mthca_qp.c ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; imm 94 drivers/infiniband/hw/mthca/mthca_srq.c return (int *) (wqe + offsetof(struct mthca_next_seg, imm)); imm 61 drivers/infiniband/hw/mthca/mthca_wqe.h __be32 imm; /* immediate data */ imm 123 drivers/infiniband/sw/rdmavt/trace_cq.h __field(u32, imm) imm 134 drivers/infiniband/sw/rdmavt/trace_cq.h __entry->imm = be32_to_cpu(wc->ex.imm_data); imm 146 drivers/infiniband/sw/rdmavt/trace_cq.h __entry->imm imm 869 drivers/infiniband/sw/rxe/rxe_hdr.h __be32 imm; imm 876 drivers/infiniband/sw/rxe/rxe_hdr.h return immdt->imm; imm 879 drivers/infiniband/sw/rxe/rxe_hdr.h static inline void __immdt_set_imm(void *arg, __be32 imm) imm 883 drivers/infiniband/sw/rxe/rxe_hdr.h immdt->imm = imm; imm 892 drivers/infiniband/sw/rxe/rxe_hdr.h static inline void immdt_set_imm(struct rxe_pkt_info *pkt, __be32 imm) imm 895 drivers/infiniband/sw/rxe/rxe_hdr.h + rxe_opcode[pkt->opcode].offset[RXE_IMMDT], imm); imm 1441 drivers/net/ethernet/cavium/thunder/nicvf_queues.c struct sq_imm_subdesc *imm; imm 1459 drivers/net/ethernet/cavium/thunder/nicvf_queues.c imm = (struct sq_imm_subdesc *)GET_SQ_DESC(sq, qentry); imm 1460 drivers/net/ethernet/cavium/thunder/nicvf_queues.c memset(imm, 0, SND_QUEUE_DESC_SIZE); imm 1461 drivers/net/ethernet/cavium/thunder/nicvf_queues.c imm->subdesc_type = SQ_DESC_TYPE_IMMEDIATE; imm 1462 drivers/net/ethernet/cavium/thunder/nicvf_queues.c imm->len = 1; imm 719 drivers/net/ethernet/chelsio/cxgb4/cxgb4.h unsigned long imm; /* # of immediate-data packets */ imm 129 drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.c rxq->stats.imm++; imm 981 drivers/net/ethernet/mellanox/mlx4/en_tx.c tx_desc->ctrl.imm = get_unaligned((__be32 *)(ethh->h_dest + 2)); imm 58 drivers/net/ethernet/mellanox/mlx4/mcg.c u64 imm; imm 61 drivers/net/ethernet/mellanox/mlx4/mcg.c err = mlx4_cmd_imm(dev, mailbox->dma, &imm, size, 0, imm 66 drivers/net/ethernet/mellanox/mlx4/mcg.c *reg_id = imm; imm 110 drivers/net/ethernet/mellanox/mlx4/mcg.c u64 imm; imm 113 drivers/net/ethernet/mellanox/mlx4/mcg.c err = mlx4_cmd_imm(dev, mailbox->dma, &imm, 0, op_mod, imm 118 drivers/net/ethernet/mellanox/mlx4/mcg.c *hash = imm; imm 243 
drivers/net/ethernet/mellanox/mlx5/core/en_main.c cseg->imm = rq->mkey_be; imm 164 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c ctrl->imm = 0; imm 240 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c wq_ctrl->imm = 0; imm 294 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(struct nfp_prog *nfp_prog, swreg dst, u16 imm, imm 305 drivers/net/ethernet/netronome/nfp/bpf/jit.c err = swreg_to_unrestricted(dst, dst, reg_imm(imm & 0xff), ®); imm 314 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg.breg, imm >> 8, width, invert, shift, imm 590 drivers/net/ethernet/netronome/nfp/bpf/jit.c static bool pack_immed(u32 imm, u16 *val, enum immed_shift *shift) imm 592 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!(imm & 0xffff0000)) { imm 593 drivers/net/ethernet/netronome/nfp/bpf/jit.c *val = imm; imm 595 drivers/net/ethernet/netronome/nfp/bpf/jit.c } else if (!(imm & 0xff0000ff)) { imm 596 drivers/net/ethernet/netronome/nfp/bpf/jit.c *val = imm >> 8; imm 598 drivers/net/ethernet/netronome/nfp/bpf/jit.c } else if (!(imm & 0x0000ffff)) { imm 599 drivers/net/ethernet/netronome/nfp/bpf/jit.c *val = imm >> 16; imm 608 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void wrp_immed(struct nfp_prog *nfp_prog, swreg dst, u32 imm) imm 613 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (pack_immed(imm, &val, &shift)) { imm 615 drivers/net/ethernet/netronome/nfp/bpf/jit.c } else if (pack_immed(~imm, &val, &shift)) { imm 618 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, imm & 0xffff, IMMED_WIDTH_ALL, imm 620 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, imm >> 16, IMMED_WIDTH_WORD, imm 633 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed_relo(struct nfp_prog *nfp_prog, swreg dst, u32 imm, imm 636 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (imm > 0xffff) { imm 641 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, imm, IMMED_WIDTH_ALL, false, IMMED_SHIFT_0B); imm 651 drivers/net/ethernet/netronome/nfp/bpf/jit.c static swreg ur_load_imm_any(struct nfp_prog *nfp_prog, u32 imm, swreg tmp_reg) imm 653 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (FIELD_FIT(UR_REG_IMM_MAX, imm)) imm 654 drivers/net/ethernet/netronome/nfp/bpf/jit.c return reg_imm(imm); imm 656 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, tmp_reg, imm); imm 664 drivers/net/ethernet/netronome/nfp/bpf/jit.c static swreg re_load_imm_any(struct nfp_prog *nfp_prog, u32 imm, swreg tmp_reg) imm 666 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (FIELD_FIT(RE_REG_IMM_MAX, imm)) imm 667 drivers/net/ethernet/netronome/nfp/bpf/jit.c return reg_imm(imm); imm 669 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, tmp_reg, imm); imm 1002 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm, u8 size) imm 1004 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_xfer(0), imm); imm 1006 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_xfer(1), imm >> 32); imm 1265 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(struct nfp_prog *nfp_prog, u8 dst, enum alu_op alu_op, u32 imm) imm 1270 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!imm) imm 1272 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!imm || !~imm) imm 1276 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!~imm) imm 1278 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!imm || !~imm) imm 1282 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!~imm) imm 1285 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!imm || !~imm) 
imm 1289 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm, imm_b(nfp_prog)); imm 1298 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 1305 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, insn->dst_reg * 2, alu_op, imm & ~0U); imm 1306 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, insn->dst_reg * 2 + 1, alu_op, imm >> 32); imm 1331 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, dst, alu_op, insn->imm); imm 1403 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 1416 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm & ~0U, imm_b(nfp_prog)); imm 1423 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm >> 32, imm_b(nfp_prog)); imm 1518 drivers/net/ethernet/netronome/nfp/bpf/jit.c u32 imm = insn->imm; imm 1520 drivers/net/ethernet/netronome/nfp/bpf/jit.c multiplier = ur_load_imm_any(nfp_prog, imm, imm_b(nfp_prog)); imm 1521 drivers/net/ethernet/netronome/nfp/bpf/jit.c ropnd_max = imm; imm 1532 drivers/net/ethernet/netronome/nfp/bpf/jit.c static int wrp_div_imm(struct nfp_prog *nfp_prog, u8 dst, u64 imm) imm 1539 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (imm > U32_MAX) { imm 1555 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (imm > 1U << 31) { imm 1556 drivers/net/ethernet/netronome/nfp/bpf/jit.c swreg tmp_b = ur_load_imm_any(nfp_prog, imm, imm_b(nfp_prog)); imm 1565 drivers/net/ethernet/netronome/nfp/bpf/jit.c rvalue = reciprocal_value_adv(imm, 32); imm 1567 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (rvalue.is_wide_m && !(imm & 1)) { imm 1568 drivers/net/ethernet/netronome/nfp/bpf/jit.c pre_shift = fls(imm & -imm) - 1; imm 1569 drivers/net/ethernet/netronome/nfp/bpf/jit.c rvalue = reciprocal_value_adv(imm >> pre_shift, 32 - pre_shift); imm 1574 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (imm == 1U << exp) { imm 1861 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = meta->insn.imm; /* sign extend */ imm 1863 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2), imm & ~0U); imm 1864 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2 + 1), imm >> 32); imm 1876 drivers/net/ethernet/netronome/nfp/bpf/jit.c return wrp_alu64_imm(nfp_prog, meta, ALU_OP_XOR, !meta->insn.imm); imm 1886 drivers/net/ethernet/netronome/nfp/bpf/jit.c return wrp_alu64_imm(nfp_prog, meta, ALU_OP_AND, !~meta->insn.imm); imm 1896 drivers/net/ethernet/netronome/nfp/bpf/jit.c return wrp_alu64_imm(nfp_prog, meta, ALU_OP_OR, !meta->insn.imm); imm 1916 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 1918 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, insn->dst_reg * 2, ALU_OP_ADD, imm & ~0U); imm 1919 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, insn->dst_reg * 2 + 1, ALU_OP_ADD_C, imm >> 32); imm 1941 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 1943 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, insn->dst_reg * 2, ALU_OP_SUB, imm & ~0U); imm 1944 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, insn->dst_reg * 2 + 1, ALU_OP_SUB_C, imm >> 32); imm 1963 drivers/net/ethernet/netronome/nfp/bpf/jit.c return wrp_div_imm(nfp_prog, insn->dst_reg * 2, insn->imm); imm 2024 drivers/net/ethernet/netronome/nfp/bpf/jit.c return 
__shl_imm64(nfp_prog, dst, insn->imm); imm 2138 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shr_imm64(nfp_prog, dst, insn->imm); imm 2250 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __ashr_imm64(nfp_prog, dst, insn->imm); imm 2340 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(insn->dst_reg * 2), insn->imm); imm 2471 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __ashr_imm(nfp_prog, meta, dst, insn->imm); imm 2490 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shr_imm(nfp_prog, meta, dst, insn->imm); imm 2529 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shl_imm(nfp_prog, meta, dst, insn->imm); imm 2555 drivers/net/ethernet/netronome/nfp/bpf/jit.c switch (insn->imm) { imm 2586 drivers/net/ethernet/netronome/nfp/bpf/jit.c imm_lo = prev->insn.imm; imm 2587 drivers/net/ethernet/netronome/nfp/bpf/jit.c imm_hi = meta->insn.imm; imm 2608 drivers/net/ethernet/netronome/nfp/bpf/jit.c return construct_data_ld(nfp_prog, meta, meta->insn.imm, 1); imm 2613 drivers/net/ethernet/netronome/nfp/bpf/jit.c return construct_data_ld(nfp_prog, meta, meta->insn.imm, 2); imm 2618 drivers/net/ethernet/netronome/nfp/bpf/jit.c return construct_data_ld(nfp_prog, meta, meta->insn.imm, 4); imm 2623 drivers/net/ethernet/netronome/nfp/bpf/jit.c return construct_data_ind_ld(nfp_prog, meta, meta->insn.imm, imm 2629 drivers/net/ethernet/netronome/nfp/bpf/jit.c return construct_data_ind_ld(nfp_prog, meta, meta->insn.imm, imm 2635 drivers/net/ethernet/netronome/nfp/bpf/jit.c return construct_data_ind_ld(nfp_prog, meta, meta->insn.imm, imm 2910 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = meta->insn.imm; /* sign extend */ imm 2916 drivers/net/ethernet/netronome/nfp/bpf/jit.c imm, size); imm 3132 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 3138 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (imm & ~0U) { imm 3139 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm & ~0U, imm_b(nfp_prog)); imm 3145 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (imm >> 32) { imm 3146 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm >> 32, imm_b(nfp_prog)); imm 3163 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, insn->imm, imm_b(nfp_prog)); imm 3174 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 3178 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm & ~0U, imm_b(nfp_prog)); imm 3184 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (is_mbpf_jmp64(meta) && imm >> 32) { imm 3196 drivers/net/ethernet/netronome/nfp/bpf/jit.c u64 imm = insn->imm; /* sign extend */ imm 3200 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (!imm) { imm 3211 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm & ~0U, imm_b(nfp_prog)); imm 3219 drivers/net/ethernet/netronome/nfp/bpf/jit.c tmp_reg = ur_load_imm_any(nfp_prog, imm >> 32, imm_b(nfp_prog)); imm 3317 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_br(nfp_prog, BR_UNC, meta->insn.imm, 1); imm 3343 drivers/net/ethernet/netronome/nfp/bpf/jit.c switch (meta->insn.imm) { imm 3922 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (insn.imm >= 0) imm 3947 drivers/net/ethernet/netronome/nfp/bpf/jit.c meta->insn.imm = -insn.imm; imm 3978 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (exp_mask[BPF_SIZE(insn.code)] != next.imm) imm 4020 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (next1.imm 
!= 0x20 || next2.imm != 0x20) imm 4413 drivers/net/ethernet/netronome/nfp/bpf/jit.c map = (void *)(unsigned long)((u32)meta1->insn.imm | imm 4414 drivers/net/ethernet/netronome/nfp/bpf/jit.c (u64)meta2->insn.imm << 32); imm 4422 drivers/net/ethernet/netronome/nfp/bpf/jit.c meta1->insn.imm = id; imm 4423 drivers/net/ethernet/netronome/nfp/bpf/jit.c meta2->insn.imm = 0; imm 4509 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst_idx = meta->n + 1 + meta->insn.imm; imm 53 drivers/net/ethernet/netronome/nfp/bpf/verifier.c int imm; imm 61 drivers/net/ethernet/netronome/nfp/bpf/verifier.c imm = reg2->var_off.value; imm 63 drivers/net/ethernet/netronome/nfp/bpf/verifier.c if (imm > ETH_ZLEN - ETH_HLEN) imm 65 drivers/net/ethernet/netronome/nfp/bpf/verifier.c if (imm > (int)bpf->adjust_head.guaranteed_add || imm 66 drivers/net/ethernet/netronome/nfp/bpf/verifier.c imm < -bpf->adjust_head.guaranteed_sub) imm 74 drivers/net/ethernet/netronome/nfp/bpf/verifier.c if (meta->arg2.reg.var_off.value != imm) imm 178 drivers/net/ethernet/netronome/nfp/bpf/verifier.c u32 func_id = meta->insn.imm; imm 315 drivers/net/ethernet/netronome/nfp/bpf/verifier.c u64 imm; imm 329 drivers/net/ethernet/netronome/nfp/bpf/verifier.c imm = reg0->var_off.value; imm 331 drivers/net/ethernet/netronome/nfp/bpf/verifier.c imm <= TC_ACT_REDIRECT && imm 332 drivers/net/ethernet/netronome/nfp/bpf/verifier.c imm != TC_ACT_SHOT && imm != TC_ACT_STOLEN && imm 333 drivers/net/ethernet/netronome/nfp/bpf/verifier.c imm != TC_ACT_QUEUED) { imm 335 drivers/net/ethernet/netronome/nfp/bpf/verifier.c reg0->type, imm); imm 586 drivers/net/ethernet/netronome/nfp/bpf/verifier.c mbpf_src(meta) == BPF_K && meta->insn.imm < 0) { imm 617 drivers/net/ethernet/netronome/nfp/bpf/verifier.c if (mbpf_src(meta) == BPF_K && meta->insn.imm < 0) { imm 728 drivers/net/ethernet/netronome/nfp/bpf/verifier.c meta->n + 1 + meta->insn.imm); imm 84 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond5(const u32 *r, const u32 *imm) imm 86 drivers/net/ethernet/qlogic/qed/qed_debug.c return ((r[0] & imm[0]) != imm[1]) && ((r[1] & imm[2]) != imm[3]); imm 89 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond7(const u32 *r, const u32 *imm) imm 91 drivers/net/ethernet/qlogic/qed/qed_debug.c return ((r[0] >> imm[0]) & imm[1]) != imm[2]; imm 94 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond6(const u32 *r, const u32 *imm) imm 96 drivers/net/ethernet/qlogic/qed/qed_debug.c return (r[0] & imm[0]) != imm[1]; imm 99 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond9(const u32 *r, const u32 *imm) imm 101 drivers/net/ethernet/qlogic/qed/qed_debug.c return ((r[0] & imm[0]) >> imm[1]) != imm 102 drivers/net/ethernet/qlogic/qed/qed_debug.c (((r[0] & imm[2]) >> imm[3]) | ((r[1] & imm[4]) << imm[5])); imm 105 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond10(const u32 *r, const u32 *imm) imm 107 drivers/net/ethernet/qlogic/qed/qed_debug.c return ((r[0] & imm[0]) >> imm[1]) != (r[0] & imm[2]); imm 110 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond4(const u32 *r, const u32 *imm) imm 112 drivers/net/ethernet/qlogic/qed/qed_debug.c return (r[0] & ~imm[0]) != imm[1]; imm 115 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond0(const u32 *r, const u32 *imm) imm 117 drivers/net/ethernet/qlogic/qed/qed_debug.c return (r[0] & ~r[1]) != imm[0]; imm 120 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond1(const u32 *r, const u32 *imm) imm 122 drivers/net/ethernet/qlogic/qed/qed_debug.c return r[0] != imm[0]; imm 125 
drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond11(const u32 *r, const u32 *imm) imm 127 drivers/net/ethernet/qlogic/qed/qed_debug.c return r[0] != r[1] && r[2] == imm[0]; imm 130 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond12(const u32 *r, const u32 *imm) imm 132 drivers/net/ethernet/qlogic/qed/qed_debug.c return r[0] != r[1] && r[2] > imm[0]; imm 135 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond3(const u32 *r, const u32 *imm) imm 140 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond13(const u32 *r, const u32 *imm) imm 142 drivers/net/ethernet/qlogic/qed/qed_debug.c return r[0] & imm[0]; imm 145 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond8(const u32 *r, const u32 *imm) imm 147 drivers/net/ethernet/qlogic/qed/qed_debug.c return r[0] < (r[1] - imm[0]); imm 150 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32 cond2(const u32 *r, const u32 *imm) imm 152 drivers/net/ethernet/qlogic/qed/qed_debug.c return r[0] > imm[0]; imm 156 drivers/net/ethernet/qlogic/qed/qed_debug.c static u32(*cond_arr[]) (const u32 *r, const u32 *imm) = { imm 207 drivers/scsi/csiostor/csio_scsi.c uint8_t imm = csio_hw_to_scsim(hw)->proto_cmd_len; imm 210 drivers/scsi/csiostor/csio_scsi.c FW_SCSI_CMD_WR_IMMDLEN(imm)); imm 367 drivers/scsi/csiostor/csio_scsi.c uint8_t imm = csio_hw_to_scsim(hw)->proto_cmd_len; imm 371 drivers/scsi/csiostor/csio_scsi.c FW_SCSI_READ_WR_IMMDLEN(imm)); imm 398 drivers/scsi/csiostor/csio_scsi.c sizeof(struct fw_scsi_read_wr) + ALIGN(imm, 16)); imm 420 drivers/scsi/csiostor/csio_scsi.c uint8_t imm = csio_hw_to_scsim(hw)->proto_cmd_len; imm 424 drivers/scsi/csiostor/csio_scsi.c FW_SCSI_WRITE_WR_IMMDLEN(imm)); imm 451 drivers/scsi/csiostor/csio_scsi.c sizeof(struct fw_scsi_write_wr) + ALIGN(imm, 16)); imm 458 drivers/scsi/csiostor/csio_scsi.c #define CSIO_SCSI_DATA_WRSZ(req, oper, sz, imm) \ imm 461 drivers/scsi/csiostor/csio_scsi.c ALIGN((imm), 16) + /* Immed data */ \ imm 676 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c bool imm = is_ofld_imm(skb); imm 680 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c if (imm) { imm 127 drivers/scsi/qla2xxx/qla_target.c struct imm_ntfy_from_isp *imm, int ha_locked); imm 3589 drivers/scsi/qla2xxx/qla_target.c struct imm_ntfy_from_isp *imm, int ha_locked) imm 3594 drivers/scsi/qla2xxx/qla_target.c rc = __qlt_send_term_imm_notif(vha, imm); imm 64 include/linux/bpf.h u64 *imm, u32 off); imm 66 include/linux/bpf.h u64 imm, u32 *off); imm 90 include/linux/filter.h .imm = 0 }) imm 98 include/linux/filter.h .imm = 0 }) imm 108 include/linux/filter.h .imm = IMM }) imm 116 include/linux/filter.h .imm = IMM }) imm 126 include/linux/filter.h .imm = LEN }) imm 136 include/linux/filter.h .imm = 0 }) imm 144 include/linux/filter.h .imm = 0 }) imm 154 include/linux/filter.h .imm = IMM }) imm 162 include/linux/filter.h .imm = IMM }) imm 171 include/linux/filter.h .imm = 1 }) imm 175 include/linux/filter.h return insn->code == (BPF_ALU | BPF_MOV | BPF_X) && insn->imm == 1; imm 188 include/linux/filter.h .imm = (__u32) (IMM) }), \ imm 194 include/linux/filter.h .imm = ((__u64) (IMM)) >> 32 }) imm 208 include/linux/filter.h .imm = IMM }) imm 216 include/linux/filter.h .imm = IMM }) imm 226 include/linux/filter.h .imm = IMM }) imm 236 include/linux/filter.h .imm = IMM }) imm 246 include/linux/filter.h .imm = 0 }) imm 256 include/linux/filter.h .imm = 0 }) imm 266 include/linux/filter.h .imm = 0 }) imm 276 include/linux/filter.h .imm = IMM }) imm 286 include/linux/filter.h .imm = 0 }) imm 296 include/linux/filter.h .imm = IMM }) imm 
306 include/linux/filter.h .imm = 0 }) imm 316 include/linux/filter.h .imm = IMM }) imm 326 include/linux/filter.h .imm = 0 }) imm 336 include/linux/filter.h .imm = TGT }) imm 349 include/linux/filter.h .imm = ((FUNC) - __bpf_call_base) }) imm 359 include/linux/filter.h .imm = IMM }) imm 369 include/linux/filter.h .imm = 0 }) imm 326 include/linux/mlx4/qp.h __be32 imm; imm 209 include/linux/mlx5/qp.h __be32 imm; imm 304 include/linux/sched/signal.h ___ARCH_SI_IA64(int imm, unsigned int flags, unsigned long isr) imm 308 include/linux/sched/signal.h ___ARCH_SI_IA64(int imm, unsigned int flags, unsigned long isr)); imm 311 include/linux/sched/signal.h ___ARCH_SI_IA64(int imm, unsigned int flags, unsigned long isr) imm 70 include/uapi/linux/bpf.h __s32 imm; /* signed immediate constant */ imm 154 kernel/bpf/arraymap.c static int array_map_direct_value_addr(const struct bpf_map *map, u64 *imm, imm 164 kernel/bpf/arraymap.c *imm = (unsigned long)array->value; imm 168 kernel/bpf/arraymap.c static int array_map_direct_value_meta(const struct bpf_map *map, u64 imm, imm 177 kernel/bpf/arraymap.c if (imm < base || imm >= base + range) imm 180 kernel/bpf/arraymap.c *off = imm - base; imm 56 kernel/bpf/core.c #define IMM insn->imm imm 294 kernel/bpf/core.c dst[i].imm = 0; imm 301 kernel/bpf/core.c dst[i].imm = 0; imm 341 kernel/bpf/core.c s64 imm = insn->imm; imm 343 kernel/bpf/core.c if (curr < pos && curr + imm + 1 >= end_old) imm 344 kernel/bpf/core.c imm += delta; imm 345 kernel/bpf/core.c else if (curr >= end_new && curr + imm + 1 < end_new) imm 346 kernel/bpf/core.c imm -= delta; imm 347 kernel/bpf/core.c if (imm < imm_min || imm > imm_max) imm 350 kernel/bpf/core.c insn->imm = imm; imm 862 kernel/bpf/core.c s32 imm = insn->imm; imm 884 kernel/bpf/core.c addr = (u8 *)__bpf_call_base + imm; imm 923 kernel/bpf/core.c if (from->imm == 0 && imm 940 kernel/bpf/core.c *to++ = BPF_ALU32_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ from->imm); imm 954 kernel/bpf/core.c *to++ = BPF_ALU64_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ from->imm); imm 974 kernel/bpf/core.c *to++ = BPF_ALU64_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ from->imm); imm 994 kernel/bpf/core.c *to++ = BPF_ALU32_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ from->imm); imm 1001 kernel/bpf/core.c *to++ = BPF_ALU64_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ aux[1].imm); imm 1007 kernel/bpf/core.c *to++ = BPF_ALU32_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ aux[0].imm); imm 1018 kernel/bpf/core.c *to++ = BPF_ALU64_IMM(BPF_MOV, BPF_REG_AX, imm_rnd ^ from->imm); imm 1367 kernel/bpf/core.c DST = (u64) (u32) insn[0].imm | ((u64) (u32) insn[1].imm) << 32; imm 1447 kernel/bpf/core.c BPF_R0 = (__bpf_call_base + insn->imm)(BPF_R1, BPF_R2, BPF_R3, imm 1452 kernel/bpf/core.c BPF_R0 = (__bpf_call_base_args + insn->imm)(BPF_R1, BPF_R2, imm 1631 kernel/bpf/core.c insn->off = (s16) insn->imm; imm 1632 kernel/bpf/core.c insn->imm = interpreters_args[(round_up(stack_depth, 32) / 32) - 1] - imm 23 kernel/bpf/disasm.c insn->imm >= 0 && insn->imm < __BPF_FUNC_MAX_ID && imm 24 kernel/bpf/disasm.c func_id_str[insn->imm]) imm 25 kernel/bpf/disasm.c return func_id_str[insn->imm]; imm 31 kernel/bpf/disasm.c snprintf(buff, len, "%+d", insn->imm); imm 114 kernel/bpf/disasm.c insn->imm, insn->dst_reg); imm 147 kernel/bpf/disasm.c insn->imm); imm 173 kernel/bpf/disasm.c insn->off, insn->imm); imm 188 kernel/bpf/disasm.c insn->imm); imm 193 kernel/bpf/disasm.c insn->src_reg, insn->imm); imm 199 kernel/bpf/disasm.c u64 imm = ((u64)(insn + 1)->imm << 32) | (u32)insn->imm; imm 205 kernel/bpf/disasm.c imm = 0; imm 209 
kernel/bpf/disasm.c __func_imm_name(cbs, insn, imm, imm 231 kernel/bpf/disasm.c insn->imm); imm 252 kernel/bpf/disasm.c insn->imm, insn->off); imm 2255 kernel/bpf/syscall.c u64 imm; imm 2266 kernel/bpf/syscall.c insns[i].imm = BPF_FUNC_tail_call; imm 2274 kernel/bpf/syscall.c insns[i].imm = 0; imm 2281 kernel/bpf/syscall.c imm = ((u64)insns[i + 1].imm << 32) | (u32)insns[i].imm; imm 2282 kernel/bpf/syscall.c map = bpf_map_from_imm(prog, imm, &off, &type); imm 2285 kernel/bpf/syscall.c insns[i].imm = map->id; imm 2286 kernel/bpf/syscall.c insns[i + 1].imm = off; imm 860 kernel/bpf/verifier.c static void __mark_reg_known(struct bpf_reg_state *reg, u64 imm) imm 865 kernel/bpf/verifier.c reg->var_off = tnum_const(imm); imm 866 kernel/bpf/verifier.c reg->smin_value = (s64)imm; imm 867 kernel/bpf/verifier.c reg->smax_value = (s64)imm; imm 868 kernel/bpf/verifier.c reg->umin_value = imm; imm 869 kernel/bpf/verifier.c reg->umax_value = imm; imm 1143 kernel/bpf/verifier.c ret = add_subprog(env, i + insn[i].imm + 1); imm 1281 kernel/bpf/verifier.c (class == BPF_ALU && op == BPF_END && insn->imm == 64)) imm 2636 kernel/bpf/verifier.c i = i + insn[i].imm + 1; imm 2667 kernel/bpf/verifier.c int start = idx + insn->imm + 1, subprog; imm 2902 kernel/bpf/verifier.c insn->imm != 0) { imm 3772 kernel/bpf/verifier.c target_insn = *insn_idx + insn->imm; imm 4939 kernel/bpf/verifier.c __mark_reg_known(&off_reg, insn->imm); imm 4971 kernel/bpf/verifier.c insn->off != 0 || insn->imm != 0) { imm 4977 kernel/bpf/verifier.c (insn->imm != 16 && insn->imm != 32 && insn->imm != 64) || imm 5003 kernel/bpf/verifier.c if (insn->imm != 0 || insn->off != 0) { imm 5061 kernel/bpf/verifier.c insn->imm); imm 5064 kernel/bpf/verifier.c (u32)insn->imm); imm 5075 kernel/bpf/verifier.c if (insn->imm != 0 || insn->off != 0) { imm 5096 kernel/bpf/verifier.c BPF_SRC(insn->code) == BPF_K && insn->imm == 0) { imm 5105 kernel/bpf/verifier.c if (insn->imm < 0 || insn->imm >= size) { imm 5106 kernel/bpf/verifier.c verbose(env, "invalid shift %d\n", insn->imm); imm 5885 kernel/bpf/verifier.c if (insn->imm != 0) { imm 5917 kernel/bpf/verifier.c pred = is_branch_taken(dst_reg, insn->imm, imm 5992 kernel/bpf/verifier.c dst_reg, insn->imm, opcode, is_jmp32); imm 6000 kernel/bpf/verifier.c insn->imm == 0 && (opcode == BPF_JEQ || opcode == BPF_JNE) && imm 6043 kernel/bpf/verifier.c u64 imm = ((u64)(insn + 1)->imm << 32) | (u32)insn->imm; imm 6046 kernel/bpf/verifier.c __mark_reg_known(®s[insn->dst_reg], imm); imm 6399 kernel/bpf/verifier.c ret = push_insn(t, t + insns[t].imm + 1, BRANCH, imm 7788 kernel/bpf/verifier.c insn->imm != BPF_FUNC_spin_unlock)) { imm 7795 kernel/bpf/verifier.c err = check_helper_call(env, insn->imm, env->insn_idx); imm 7801 kernel/bpf/verifier.c insn->imm != 0 || imm 7814 kernel/bpf/verifier.c insn->imm != 0 || imm 7986 kernel/bpf/verifier.c (BPF_MODE(insn->code) != BPF_MEM || insn->imm != 0)) { imm 7993 kernel/bpf/verifier.c BPF_MODE(insn->code) != BPF_XADD) || insn->imm != 0)) { imm 8021 kernel/bpf/verifier.c insn[1].imm != 0)) { imm 8027 kernel/bpf/verifier.c f = fdget(insn[0].imm); imm 8031 kernel/bpf/verifier.c insn[0].imm); imm 8045 kernel/bpf/verifier.c u32 off = insn[1].imm; imm 8071 kernel/bpf/verifier.c insn[0].imm = (u32)addr; imm 8072 kernel/bpf/verifier.c insn[1].imm = addr >> 32; imm 8537 kernel/bpf/verifier.c rnd_hi32_patch[1].imm = imm_rnd; imm 8761 kernel/bpf/verifier.c subprog = find_subprog(env, i + insn->imm + 1); imm 8764 kernel/bpf/verifier.c i + insn->imm + 1); imm 8774 kernel/bpf/verifier.c 
env->insn_aux_data[i].call_imm = insn->imm; imm 8776 kernel/bpf/verifier.c insn->imm = 1; imm 8841 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(func[subprog]->bpf_func) - imm 8888 kernel/bpf/verifier.c insn->imm = subprog; imm 8910 kernel/bpf/verifier.c insn->imm = env->insn_aux_data[i].call_imm; imm 9083 kernel/bpf/verifier.c if (insn->imm == BPF_FUNC_get_route_realm) imm 9085 kernel/bpf/verifier.c if (insn->imm == BPF_FUNC_get_prandom_u32) imm 9087 kernel/bpf/verifier.c if (insn->imm == BPF_FUNC_override_return) imm 9089 kernel/bpf/verifier.c if (insn->imm == BPF_FUNC_tail_call) { imm 9104 kernel/bpf/verifier.c insn->imm = 0; imm 9146 kernel/bpf/verifier.c (insn->imm == BPF_FUNC_map_lookup_elem || imm 9147 kernel/bpf/verifier.c insn->imm == BPF_FUNC_map_update_elem || imm 9148 kernel/bpf/verifier.c insn->imm == BPF_FUNC_map_delete_elem || imm 9149 kernel/bpf/verifier.c insn->imm == BPF_FUNC_map_push_elem || imm 9150 kernel/bpf/verifier.c insn->imm == BPF_FUNC_map_pop_elem || imm 9151 kernel/bpf/verifier.c insn->imm == BPF_FUNC_map_peek_elem)) { imm 9158 kernel/bpf/verifier.c if (insn->imm == BPF_FUNC_map_lookup_elem && imm 9192 kernel/bpf/verifier.c switch (insn->imm) { imm 9194 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(ops->map_lookup_elem) - imm 9198 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(ops->map_update_elem) - imm 9202 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(ops->map_delete_elem) - imm 9206 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(ops->map_push_elem) - imm 9210 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(ops->map_pop_elem) - imm 9214 kernel/bpf/verifier.c insn->imm = BPF_CAST_CALL(ops->map_peek_elem) - imm 9223 kernel/bpf/verifier.c fn = env->ops->get_func_proto(insn->imm, env->prog); imm 9230 kernel/bpf/verifier.c func_id_name(insn->imm), insn->imm); imm 9233 kernel/bpf/verifier.c insn->imm = fn->func - __bpf_call_base; imm 1658 kernel/signal.c ___ARCH_SI_IA64(int imm, unsigned int flags, unsigned long isr) imm 1672 kernel/signal.c info.si_imm = imm; imm 1681 kernel/signal.c ___ARCH_SI_IA64(int imm, unsigned int flags, unsigned long isr)) imm 1685 kernel/signal.c ___ARCH_SI_IA64(imm, flags, isr), current); imm 1690 kernel/signal.c ___ARCH_SI_IA64(int imm, unsigned int flags, unsigned long isr) imm 1704 kernel/signal.c info.si_imm = imm; imm 329 kernel/trace/trace_probe.c static int str_to_immediate(char *str, unsigned long *imm) imm 332 kernel/trace/trace_probe.c return kstrtoul(str, 0, imm); imm 334 kernel/trace/trace_probe.c return kstrtol(str, 0, (long *)imm); imm 336 kernel/trace/trace_probe.c return kstrtol(str + 1, 0, (long *)imm); imm 708 net/core/filter.c insn->imm = fp->k; imm 6790 net/core/filter.c *insn++ = BPF_MOV64_IMM(BPF_REG_2, orig->imm); imm 6793 net/core/filter.c if (orig->imm) imm 6794 net/core/filter.c *insn++ = BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, orig->imm); imm 16 samples/bpf/bpf_insn.h .imm = 0 }) imm 24 samples/bpf/bpf_insn.h .imm = 0 }) imm 34 samples/bpf/bpf_insn.h .imm = IMM }) imm 42 samples/bpf/bpf_insn.h .imm = IMM }) imm 52 samples/bpf/bpf_insn.h .imm = 0 }) imm 60 samples/bpf/bpf_insn.h .imm = 0 }) imm 70 samples/bpf/bpf_insn.h .imm = IMM }) imm 78 samples/bpf/bpf_insn.h .imm = IMM }) imm 90 samples/bpf/bpf_insn.h .imm = (__u32) (IMM) }), \ imm 96 samples/bpf/bpf_insn.h .imm = ((__u64) (IMM)) >> 32 }) imm 115 samples/bpf/bpf_insn.h .imm = IMM }) imm 125 samples/bpf/bpf_insn.h .imm = 0 }) imm 135 samples/bpf/bpf_insn.h .imm = 0 }) imm 145 samples/bpf/bpf_insn.h .imm = 0 }) imm 155 samples/bpf/bpf_insn.h .imm = IMM }) 
imm 165 samples/bpf/bpf_insn.h .imm = 0 }) imm 175 samples/bpf/bpf_insn.h .imm = 0 }) imm 185 samples/bpf/bpf_insn.h .imm = IMM }) imm 195 samples/bpf/bpf_insn.h .imm = IMM }) imm 205 samples/bpf/bpf_insn.h .imm = IMM }) imm 215 samples/bpf/bpf_insn.h .imm = 0 }) imm 375 samples/bpf/bpf_load.c insn[insn_idx].imm = maps[map_idx].fd; imm 84 tools/arch/x86/include/asm/inat.h #define INAT_MAKE_IMM(imm) (imm << INAT_IMM_OFFS) imm 173 tools/bpf/bpftool/xlated_dumper.c unsigned long address = dd->address_call_base + insn->imm; imm 177 tools/bpf/bpftool/xlated_dumper.c (__u32) insn->imm < dd->nr_jited_ksyms && dd->jited_ksyms) imm 178 tools/bpf/bpftool/xlated_dumper.c address = dd->jited_ksyms[insn->imm]; imm 195 tools/bpf/bpftool/xlated_dumper.c "map[id:%u]", insn->imm); imm 198 tools/bpf/bpftool/xlated_dumper.c "map[id:%u][0]+%u", insn->imm, (insn + 1)->imm); imm 278 tools/bpf/bpftool/xlated_dumper.c print_hex_data_json((uint8_t *)(&insn[i].imm), imm 281 tools/bpf/bpftool/xlated_dumper.c print_hex_data_json((uint8_t *)(&insn[i].imm), imm 40 tools/include/linux/filter.h .imm = 0 }) imm 48 tools/include/linux/filter.h .imm = 0 }) imm 58 tools/include/linux/filter.h .imm = IMM }) imm 66 tools/include/linux/filter.h .imm = IMM }) imm 76 tools/include/linux/filter.h .imm = LEN }) imm 86 tools/include/linux/filter.h .imm = 0 }) imm 94 tools/include/linux/filter.h .imm = 0 }) imm 104 tools/include/linux/filter.h .imm = IMM }) imm 112 tools/include/linux/filter.h .imm = IMM }) imm 122 tools/include/linux/filter.h .imm = IMM }) imm 130 tools/include/linux/filter.h .imm = IMM }) imm 140 tools/include/linux/filter.h .imm = IMM }) imm 150 tools/include/linux/filter.h .imm = IMM }) imm 160 tools/include/linux/filter.h .imm = 0 }) imm 170 tools/include/linux/filter.h .imm = 0 }) imm 180 tools/include/linux/filter.h .imm = 0 }) imm 190 tools/include/linux/filter.h .imm = IMM }) imm 200 tools/include/linux/filter.h .imm = 0 }) imm 210 tools/include/linux/filter.h .imm = 0 }) imm 220 tools/include/linux/filter.h .imm = IMM }) imm 230 tools/include/linux/filter.h .imm = IMM }) imm 240 tools/include/linux/filter.h .imm = 0 }) imm 250 tools/include/linux/filter.h .imm = ((FUNC) - BPF_FUNC_unspec) }) imm 260 tools/include/linux/filter.h .imm = IMM }) imm 273 tools/include/linux/filter.h .imm = (__u32) (IMM) }), \ imm 279 tools/include/linux/filter.h .imm = ((__u64) (IMM)) >> 32 }) imm 287 tools/include/linux/filter.h .imm = IMM1 }), \ imm 293 tools/include/linux/filter.h .imm = IMM2 }) imm 313 tools/include/linux/filter.h .imm = TGT }) imm 323 tools/include/linux/filter.h .imm = 0 }) imm 70 tools/include/uapi/linux/bpf.h __s32 imm; /* signed immediate constant */ imm 2019 tools/lib/bpf/libbpf.c insns[0].imm = map; imm 2808 tools/lib/bpf/libbpf.c if (insn->imm != orig_off) imm 2810 tools/lib/bpf/libbpf.c insn->imm = new_off; imm 2818 tools/lib/bpf/libbpf.c insn->off, insn->imm); imm 3244 tools/lib/bpf/libbpf.c insn->imm += prog->main_prog_cnt - relo->insn_idx; imm 3286 tools/lib/bpf/libbpf.c insns[insn_idx + 1].imm = insns[insn_idx].imm; imm 3288 tools/lib/bpf/libbpf.c insns[insn_idx].imm = obj->maps[map_idx].fd; imm 129 tools/lib/bpf/libbpf_probes.c insns[0].imm = 2; imm 603 tools/testing/selftests/bpf/test_align.c if (fp[len].code != 0 || fp[len].imm != 0) imm 5799 tools/testing/selftests/bpf/test_btf.c if (fp[len].code != 0 || fp[len].imm != 0) imm 67 tools/testing/selftests/bpf/test_cgroup_storage.c prog[0].imm = percpu_map_fd; imm 68 tools/testing/selftests/bpf/test_cgroup_storage.c prog[7].imm = map_fd; imm 77 
tools/testing/selftests/bpf/test_lru_map.c insns[0].imm = mfd; imm 323 tools/testing/selftests/bpf/test_sock.c if (fp[len].code != 0 || fp[len].imm != 0) imm 1382 tools/testing/selftests/bpf/test_sysctl.c if (fp[len].code != 0 || fp[len].imm != 0) imm 1405 tools/testing/selftests/bpf/test_sysctl.c prog[insn_num].imm = (uint32_t)value.num; imm 1406 tools/testing/selftests/bpf/test_sysctl.c prog[insn_num + 1].imm = (uint32_t)(value.num >> 32); imm 362 tools/testing/selftests/bpf/test_verifier.c if (fp[len].code != 0 || fp[len].imm != 0) imm 653 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_hash_8b].imm = map_fds[0]; imm 662 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_hash_48b].imm = map_fds[1]; imm 671 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_hash_16b].imm = map_fds[2]; imm 681 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_array_48b].imm = map_fds[3]; imm 689 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_prog1].imm = map_fds[4]; imm 697 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_prog2].imm = map_fds[5]; imm 705 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_in_map].imm = map_fds[6]; imm 713 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_cgroup_storage].imm = map_fds[7]; imm 721 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_percpu_cgroup_storage].imm = map_fds[8]; imm 729 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_sockmap].imm = map_fds[9]; imm 737 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_sockhash].imm = map_fds[10]; imm 745 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_xskmap].imm = map_fds[11]; imm 753 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_stacktrace].imm = map_fds[12]; imm 760 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_spin_lock].imm = map_fds[13]; imm 770 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_array_ro].imm = map_fds[14]; imm 780 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_array_wo].imm = map_fds[15]; imm 789 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_array_small].imm = map_fds[16]; imm 796 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_sk_storage_map].imm = map_fds[17]; imm 804 tools/testing/selftests/bpf/test_verifier.c prog[*fixup_map_event_output].imm = map_fds[18]; imm 187 tools/testing/selftests/bpf/verifier/basic_instr.c .imm = 32,
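
The entries indexed above from include/linux/filter.h, kernel/bpf/core.c (line 1367) and kernel/bpf/disasm.c (line 199) all revolve around one pattern: a 64-bit BPF constant does not fit the 32-bit imm field of struct bpf_insn, so the BPF_LD_IMM64 macros spread it across two consecutive instructions, and consumers rebuild it as ((u64)(insn + 1)->imm << 32) | (u32)insn->imm. The stand-alone sketch below illustrates that split and reassembly only; the struct is a simplified local stand-in for struct bpf_insn (the real uapi definition uses dst_reg/src_reg bitfields), and the helper names ld_imm64_split/ld_imm64_join are invented for this example, not kernel APIs.

    /*
     * Minimal sketch of the 64-bit immediate split used by BPF_LD_IMM64:
     * low 32 bits live in insn[0].imm, high 32 bits in insn[1].imm.
     * insn_stub is a local stand-in, not the kernel's struct bpf_insn.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct insn_stub {
            uint8_t  code;   /* opcode */
            uint8_t  regs;   /* stand-in for dst_reg:4 / src_reg:4 */
            int16_t  off;    /* signed offset */
            int32_t  imm;    /* signed immediate constant */
    };

    /* Split a 64-bit constant the way BPF_LD_IMM64_RAW does. */
    static void ld_imm64_split(struct insn_stub insn[2], uint64_t val)
    {
            insn[0].imm = (int32_t)(uint32_t)val;
            insn[1].imm = (int32_t)(uint32_t)(val >> 32);
    }

    /* Reassemble it the way the interpreter and disassembler do:
     * ((u64)(insn + 1)->imm << 32) | (u32)insn->imm.
     */
    static uint64_t ld_imm64_join(const struct insn_stub insn[2])
    {
            return ((uint64_t)(uint32_t)insn[1].imm << 32) |
                   (uint32_t)insn[0].imm;
    }

    int main(void)
    {
            struct insn_stub pair[2] = { { 0 }, { 0 } };
            uint64_t val = 0x1234567890abcdefULL;

            ld_imm64_split(pair, val);
            printf("lo imm = 0x%08x, hi imm = 0x%08x, joined = 0x%016llx\n",
                   (unsigned)pair[0].imm, (unsigned)pair[1].imm,
                   (unsigned long long)ld_imm64_join(pair));
            return 0;
    }

Built with any C compiler, the program prints the two 32-bit halves and the reassembled value, which round-trips to the original constant; this mirrors why the verifier and the bpftool dumper indexed above always consume LD_IMM64 as an instruction pair (insn->imm plus (insn + 1)->imm).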