/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * BPF JIT compiler for ARM64
 *
 * Copyright (C) 2014-2016 Zi Shen Lim <zlim.lnx@gmail.com>
 */
#ifndef _BPF_JIT_H
#define _BPF_JIT_H

#include <asm/insn.h>
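
/*
 * Each A64_*() helper below expands to one of the aarch64_insn_gen_*()
 * encoders declared in <asm/insn.h> and evaluates to a single 32-bit
 * instruction word (u32).  An illustrative use from the JIT side, where
 * emit() and ctx are the instruction writer and jit_ctx kept in
 * bpf_jit_comp.c:
 *
 *	emit(A64_MOVZ(1, tmp, imm & 0xffff, 0), ctx);
 *	emit(A64_ADD(1, dst, dst, tmp), ctx);
 */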

/* 5-bit Register Operand */
#define A64_R(x)	AARCH64_INSN_REG_##x
#define A64_FP		AARCH64_INSN_REG_FP
#define A64_LR		AARCH64_INSN_REG_LR
#define A64_ZR		AARCH64_INSN_REG_ZR
#define A64_SP		AARCH64_INSN_REG_SP

#define A64_VARIANT(sf) \
	((sf) ? AARCH64_INSN_VARIANT_64BIT : AARCH64_INSN_VARIANT_32BIT)

/* Compare & branch (immediate) */
#define A64_COMP_BRANCH(sf, Rt, offset, type) \
	aarch64_insn_gen_comp_branch_imm(0, offset, Rt, A64_VARIANT(sf), \
		AARCH64_INSN_BRANCH_COMP_##type)
#define A64_CBZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, ZERO)
#define A64_CBNZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, NONZERO)

/* Conditional branch (immediate) */
#define A64_COND_BRANCH(cond, offset) \
	aarch64_insn_gen_cond_branch_imm(0, offset, cond)
#define A64_COND_EQ	AARCH64_INSN_COND_EQ /* == */
#define A64_COND_NE	AARCH64_INSN_COND_NE /* != */
#define A64_COND_CS	AARCH64_INSN_COND_CS /* unsigned >= */
#define A64_COND_HI	AARCH64_INSN_COND_HI /* unsigned > */
#define A64_COND_LS	AARCH64_INSN_COND_LS /* unsigned <= */
#define A64_COND_CC	AARCH64_INSN_COND_CC /* unsigned < */
#define A64_COND_GE	AARCH64_INSN_COND_GE /* signed >= */
#define A64_COND_GT	AARCH64_INSN_COND_GT /* signed > */
#define A64_COND_LE	AARCH64_INSN_COND_LE /* signed <= */
#define A64_COND_LT	AARCH64_INSN_COND_LT /* signed < */
#define A64_B_(cond, imm19) A64_COND_BRANCH(cond, (imm19) << 2)

/* Unconditional branch (immediate) */
#define A64_BRANCH(offset, type) aarch64_insn_gen_branch_imm(0, offset, \
	AARCH64_INSN_BRANCH_##type)
#define A64_B(imm26)  A64_BRANCH((imm26) << 2, NOLINK)
#define A64_BL(imm26) A64_BRANCH((imm26) << 2, LINK)
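/*
 * The imm19/imm26 arguments above are branch offsets counted in 32-bit
 * instructions; the << 2 turns them into the byte offsets (relative to a
 * zero pc) that the aarch64_insn_gen_*_branch_imm() encoders take.
 */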

/* Unconditional branch (register) */
#define A64_BR(Rn)  aarch64_insn_gen_branch_reg(Rn, AARCH64_INSN_BRANCH_NOLINK)
#define A64_BLR(Rn) aarch64_insn_gen_branch_reg(Rn, AARCH64_INSN_BRANCH_LINK)
#define A64_RET(Rn) aarch64_insn_gen_branch_reg(Rn, AARCH64_INSN_BRANCH_RETURN)

/* Load/store register (register offset) */
#define A64_LS_REG(Rt, Rn, Rm, size, type) \
	aarch64_insn_gen_load_store_reg(Rt, Rn, Rm, \
		AARCH64_INSN_SIZE_##size, \
		AARCH64_INSN_LDST_##type##_REG_OFFSET)
#define A64_STRB(Wt, Xn, Xm)  A64_LS_REG(Wt, Xn, Xm, 8, STORE)
#define A64_LDRB(Wt, Xn, Xm)  A64_LS_REG(Wt, Xn, Xm, 8, LOAD)
#define A64_STRH(Wt, Xn, Xm)  A64_LS_REG(Wt, Xn, Xm, 16, STORE)
#define A64_LDRH(Wt, Xn, Xm)  A64_LS_REG(Wt, Xn, Xm, 16, LOAD)
#define A64_STR32(Wt, Xn, Xm) A64_LS_REG(Wt, Xn, Xm, 32, STORE)
#define A64_LDR32(Wt, Xn, Xm) A64_LS_REG(Wt, Xn, Xm, 32, LOAD)
#define A64_STR64(Xt, Xn, Xm) A64_LS_REG(Xt, Xn, Xm, 64, STORE)
#define A64_LDR64(Xt, Xn, Xm) A64_LS_REG(Xt, Xn, Xm, 64, LOAD)

/* Load/store register pair */
#define A64_LS_PAIR(Rt, Rt2, Rn, offset, ls, type) \
	aarch64_insn_gen_load_store_pair(Rt, Rt2, Rn, offset, \
		AARCH64_INSN_VARIANT_64BIT, \
		AARCH64_INSN_LDST_##ls##_PAIR_##type)
/* Rn -= 16; Rn[0] = Rt; Rn[8] = Rt2; */
#define A64_PUSH(Rt, Rt2, Rn) A64_LS_PAIR(Rt, Rt2, Rn, -16, STORE, PRE_INDEX)
/* Rt = Rn[0]; Rt2 = Rn[8]; Rn += 16; */
#define A64_POP(Rt, Rt2, Rn)  A64_LS_PAIR(Rt, Rt2, Rn, 16, LOAD, POST_INDEX)

/* Load/store exclusive */
#define A64_SIZE(sf) \
	((sf) ? AARCH64_INSN_SIZE_64 : AARCH64_INSN_SIZE_32)
#define A64_LSX(sf, Rt, Rn, Rs, type) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
				       AARCH64_INSN_LDST_##type)
/* Rt = [Rn]; (atomic) */
#define A64_LDXR(sf, Rt, Rn) \
	A64_LSX(sf, Rt, Rn, A64_ZR, LOAD_EX)
/* [Rn] = Rt; (atomic) Rs = [state] */
#define A64_STXR(sf, Rt, Rn, Rs) \
	A64_LSX(sf, Rt, Rn, Rs, STORE_EX)

/* LSE atomics */
#define A64_STADD(sf, Rn, Rs) \
	aarch64_insn_gen_stadd(Rn, Rs, A64_SIZE(sf))
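/*
 * STADD is the no-result form of LDADD and requires the ARMv8.1 LSE
 * atomics extension; on CPUs without LSE an equivalent atomic add can be
 * built from the A64_LDXR/A64_STXR exclusives above, which is what the
 * JIT falls back to when LSE atomics are not available.
 */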

/* Add/subtract (immediate) */
#define A64_ADDSUB_IMM(sf, Rd, Rn, imm12, type) \
	aarch64_insn_gen_add_sub_imm(Rd, Rn, imm12, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP imm12 */
#define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
#define A64_SUB_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB)
/* Rd = Rn */
#define A64_MOV(sf, Rd, Rn) A64_ADD_I(sf, Rd, Rn, 0)

/* Bitfield move */
#define A64_BITFIELD(sf, Rd, Rn, immr, imms, type) \
	aarch64_insn_gen_bitfield(Rd, Rn, immr, imms, \
		A64_VARIANT(sf), AARCH64_INSN_BITFIELD_MOVE_##type)
/* Signed, with sign replication to left and zeros to right */
#define A64_SBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, SIGNED)
/* Unsigned, with zeros to left and right */
#define A64_UBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, UNSIGNED)

/* Rd = Rn << shift */
#define A64_LSL(sf, Rd, Rn, shift) ({	\
	int sz = (sf) ? 64 : 32;	\
	A64_UBFM(sf, Rd, Rn, (unsigned)-(shift) % sz, sz - 1 - (shift)); \
})
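/*
 * This relies on the architectural aliasing of LSL onto UBFM:
 * LSL Rd, Rn, #shift == UBFM Rd, Rn, #(-shift MOD size), #(size - 1 - shift),
 * with size being 32 or 64 according to sf.
 */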
/* Rd = Rn >> shift */
#define A64_LSR(sf, Rd, Rn, shift) A64_UBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
/* Rd = Rn >> shift; signed */
#define A64_ASR(sf, Rd, Rn, shift) A64_SBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)

/* Zero extend */
#define A64_UXTH(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 15)
#define A64_UXTW(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 31)

/* Move wide (immediate) */
#define A64_MOVEW(sf, Rd, imm16, shift, type) \
	aarch64_insn_gen_movewide(Rd, imm16, shift, \
		A64_VARIANT(sf), AARCH64_INSN_MOVEWIDE_##type)
/* Rd = Zeros (for MOVZ);
 * Rd |= imm16 << shift (where shift is {0, 16, 32, 48});
 * Rd = ~Rd; (for MOVN); */
#define A64_MOVN(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, INVERSE)
#define A64_MOVZ(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, ZERO)
#define A64_MOVK(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, KEEP)

/* Add/subtract (shifted register) */
#define A64_ADDSUB_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_add_sub_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP Rm */
#define A64_ADD(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
#define A64_SUB(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB)
#define A64_SUBS(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB_SETFLAGS)
/* Rd = -Rm */
#define A64_NEG(sf, Rd, Rm) A64_SUB(sf, Rd, A64_ZR, Rm)
/* Rn - Rm; set condition flags */
#define A64_CMP(sf, Rn, Rm) A64_SUBS(sf, A64_ZR, Rn, Rm)

/* Data-processing (1 source) */
#define A64_DATA1(sf, Rd, Rn, type) aarch64_insn_gen_data1(Rd, Rn, \
	A64_VARIANT(sf), AARCH64_INSN_DATA1_##type)
/* Rd = BSWAPx(Rn) */
#define A64_REV16(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_16)
#define A64_REV32(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_32)
#define A64_REV64(Rd, Rn)     A64_DATA1(1, Rd, Rn, REVERSE_64)

/* Data-processing (2 source) */
/* Rd = Rn OP Rm */
#define A64_DATA2(sf, Rd, Rn, Rm, type) aarch64_insn_gen_data2(Rd, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA2_##type)
#define A64_UDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, UDIV)
#define A64_LSLV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSLV)
#define A64_LSRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSRV)
#define A64_ASRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, ASRV)

/* Data-processing (3 source) */
/* Rd = Ra + Rn * Rm */
#define A64_MADD(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MADD)
/* Rd = Ra - Rn * Rm */
#define A64_MSUB(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MSUB)
/* Rd = Rn * Rm */
#define A64_MUL(sf, Rd, Rn, Rm) A64_MADD(sf, Rd, A64_ZR, Rn, Rm)

/* Logical (shifted register) */
#define A64_LOGIC_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_logical_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_LOGIC_##type)
/* Rd = Rn OP Rm */
#define A64_AND(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND)
#define A64_ORR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, ORR)
#define A64_EOR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, EOR)
#define A64_ANDS(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND_SETFLAGS)
/* Rn & Rm; set condition flags */
#define A64_TST(sf, Rn, Rm) A64_ANDS(sf, A64_ZR, Rn, Rm)

#endif /* _BPF_JIT_H */