1 { 2 "bpf_get_stack return R0 within range", 3 .insns = { 4 BPF_MOV64_REG(BPF_REG_6, BPF_REG_1), 5 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0), 6 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10), 7 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8), 8 BPF_LD_MAP_FD(BPF_REG_1, 0), 9 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem), 10 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 28), 11 BPF_MOV64_REG(BPF_REG_7, BPF_REG_0), 12 BPF_MOV64_IMM(BPF_REG_9, sizeof(struct test_val)/2), 13 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6), 14 BPF_MOV64_REG(BPF_REG_2, BPF_REG_7), 15 BPF_MOV64_IMM(BPF_REG_3, sizeof(struct test_val)/2), 16 BPF_MOV64_IMM(BPF_REG_4, 256), 17 BPF_EMIT_CALL(BPF_FUNC_get_stack), 18 BPF_MOV64_IMM(BPF_REG_1, 0), 19 BPF_MOV64_REG(BPF_REG_8, BPF_REG_0), 20 BPF_ALU64_IMM(BPF_LSH, BPF_REG_8, 32), 21 BPF_ALU64_IMM(BPF_ARSH, BPF_REG_8, 32), 22 BPF_JMP_REG(BPF_JSLT, BPF_REG_8, BPF_REG_1, 16), 23 BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_8), 24 BPF_MOV64_REG(BPF_REG_2, BPF_REG_7), 25 BPF_ALU64_REG(BPF_ADD, BPF_REG_2, BPF_REG_8), 26 BPF_MOV64_REG(BPF_REG_1, BPF_REG_9), 27 BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 32), 28 BPF_ALU64_IMM(BPF_ARSH, BPF_REG_1, 32), 29 BPF_MOV64_REG(BPF_REG_3, BPF_REG_2), 30 BPF_ALU64_REG(BPF_ADD, BPF_REG_3, BPF_REG_1), 31 BPF_MOV64_REG(BPF_REG_1, BPF_REG_7), 32 BPF_MOV64_IMM(BPF_REG_5, sizeof(struct test_val)/2), 33 BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_5), 34 BPF_JMP_REG(BPF_JGE, BPF_REG_3, BPF_REG_1, 4), 35 BPF_MOV64_REG(BPF_REG_1, BPF_REG_6), 36 BPF_MOV64_REG(BPF_REG_3, BPF_REG_9), 37 BPF_MOV64_IMM(BPF_REG_4, 0), 38 BPF_EMIT_CALL(BPF_FUNC_get_stack), 39 BPF_EXIT_INSN(), 40 }, 41 .fixup_map_hash_48b = { 4 }, 42 .result = ACCEPT, 43 .prog_type = BPF_PROG_TYPE_TRACEPOINT, 44 },