Lines matching refs: sf (the A64 instruction-encoding macros of the arm64 BPF JIT, arch/arm64/net/bpf_jit.h). The sf flag selects between the 32-bit (W-register) and 64-bit (X-register) variant of each encoded instruction.

#define A64_VARIANT(sf) \
	((sf) ? AARCH64_INSN_VARIANT_64BIT : AARCH64_INSN_VARIANT_32BIT)

/* Compare & branch (immediate) */
#define A64_COMP_BRANCH(sf, Rt, offset, type) \
	aarch64_insn_gen_comp_branch_imm(0, offset, Rt, A64_VARIANT(sf), \
		AARCH64_INSN_BRANCH_COMP_##type)
#define A64_CBZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, ZERO)
#define A64_CBNZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, NONZERO)
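
For instance, a JIT pass might emit a compare-and-branch like this (a sketch assuming the emit() helper and ctx of bpf_jit_comp.c; the offset counts instructions, not bytes, since the macro shifts imm19 left by 2 to form the byte offset):

	/* skip the next jmp_offset instructions if dst is non-zero */
	emit(A64_CBNZ(is64, dst, jmp_offset), ctx);
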
/* Load/store exclusive */
#define A64_SIZE(sf) \
	((sf) ? AARCH64_INSN_SIZE_64 : AARCH64_INSN_SIZE_32)
#define A64_LSX(sf, Rt, Rn, Rs, type) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
				       AARCH64_INSN_LDST_##type)
/* Rt = [Rn]; (atomic) */
#define A64_LDXR(sf, Rt, Rn) \
	A64_LSX(sf, Rt, Rn, A64_ZR, LOAD_EX)
/* [Rn] = Rt; (atomic) Rs = [state] */
#define A64_STXR(sf, Rt, Rn, Rs) \
	A64_LSX(sf, Rt, Rn, Rs, STORE_EX)
/* [Rn] = Rt (store release); (atomic) Rs = [state] */
#define A64_STLXR(sf, Rt, Rn, Rs) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
				       AARCH64_INSN_LDST_STORE_REL_EX)
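
On cores without LSE atomics, an atomic add is emitted as a load-exclusive/store-exclusive retry loop. A minimal sketch, again assuming emit()/ctx plus scratch registers tmp and state:

	emit(A64_LDXR(is64, tmp, reg), ctx);	/* tmp = [reg] (exclusive) */
	emit(A64_ADD(is64, tmp, tmp, src), ctx);
	emit(A64_STXR(is64, tmp, reg, state), ctx); /* state = 0 on success */
	emit(A64_CBNZ(0, state, -3), ctx);	/* lost exclusivity: retry */
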
/*
 * LSE atomics
 *
 * ST{ADD,CLR,EOR,SET} is encoded as the corresponding LD op
 * with XZR (A64_ZR) as the destination register.
 */
#define A64_ST_OP(sf, Rn, Rs, op) \
	aarch64_insn_gen_atomic_ld_op(A64_ZR, Rn, Rs, \
		A64_SIZE(sf), AARCH64_INSN_MEM_ATOMIC_##op, \
		AARCH64_INSN_MEM_ORDER_NONE)
/* [Rn] <op>= Rs */
#define A64_STADD(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, ADD)
#define A64_STCLR(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, CLR)
#define A64_STEOR(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, EOR)
#define A64_STSET(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, SET)
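
With LSE available, the same unordered atomic add collapses to a single instruction (same assumed helpers as above):

	emit(A64_STADD(is64, reg, src), ctx);	/* [reg] += src */
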
#define A64_LD_OP_AL(sf, Rt, Rn, Rs, op) \
	aarch64_insn_gen_atomic_ld_op(Rt, Rn, Rs, \
		A64_SIZE(sf), AARCH64_INSN_MEM_ATOMIC_##op, \
		AARCH64_INSN_MEM_ORDER_ACQREL)
/* Rt = [Rn] (load acquire); [Rn] <op>= Rs (store release) */
#define A64_LDADDAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, ADD)
#define A64_LDCLRAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, CLR)
#define A64_LDEORAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, EOR)
#define A64_LDSETAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, SET)
/* Rt = [Rn] (load acquire); [Rn] = Rs (store release) */
#define A64_SWPAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, SWP)
/* Rs = CAS(Rn, Rs, Rt) (load acquire & store release) */
#define A64_CASAL(sf, Rt, Rn, Rs) \
	aarch64_insn_gen_cas(Rt, Rn, Rs, A64_SIZE(sf), \
		AARCH64_INSN_MEM_ORDER_ACQREL)
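
The acquire-release forms back the fetching BPF atomics; an atomic fetch-and-add that returns the old value in src might be emitted as (sketch):

	/* src = atomic_fetch_add(reg, src), acquire-release ordering */
	emit(A64_LDADDAL(is64, src, reg, src), ctx);
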
/* Add/subtract (immediate) */
#define A64_ADDSUB_IMM(sf, Rd, Rn, imm12, type) \
	aarch64_insn_gen_add_sub_imm(Rd, Rn, imm12, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP imm12 */
#define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
#define A64_SUB_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB)
#define A64_ADDS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD_SETFLAGS)
#define A64_SUBS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB_SETFLAGS)
/* Rn + imm12; set condition flags */
#define A64_CMN_I(sf, Rn, imm12) A64_ADDS_I(sf, A64_ZR, Rn, imm12)
/* Rn - imm12; set condition flags */
#define A64_CMP_I(sf, Rn, imm12) A64_SUBS_I(sf, A64_ZR, Rn, imm12)
/* Rd = Rn */
#define A64_MOV(sf, Rd, Rn) A64_ADD_I(sf, Rd, Rn, 0)
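
imm12 is an unsigned 12-bit field, so only small constants encode directly; larger ones are first materialized into a scratch register. A sketch of the usual fallback, with is_addsub_imm() and emit_a64_mov_i() assumed to be the helpers of bpf_jit_comp.c:

	if (is_addsub_imm(imm)) {
		emit(A64_ADD_I(is64, dst, dst, imm), ctx);
	} else {
		emit_a64_mov_i(is64, tmp, imm, ctx);	/* tmp = imm */
		emit(A64_ADD(is64, dst, dst, tmp), ctx);
	}
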
/* Bitfield move */
#define A64_BITFIELD(sf, Rd, Rn, immr, imms, type) \
	aarch64_insn_gen_bitfield(Rd, Rn, immr, imms, \
		A64_VARIANT(sf), AARCH64_INSN_BITFIELD_MOVE_##type)
/* Signed, with sign replication to left and zeros to right */
#define A64_SBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, SIGNED)
/* Unsigned, with zeros to left and right */
#define A64_UBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, UNSIGNED)

/* Rd = Rn << shift */
#define A64_LSL(sf, Rd, Rn, shift) ({	\
	int sz = (sf) ? 64 : 32;	\
	A64_UBFM(sf, Rd, Rn, (unsigned)-(shift) % sz, sz - 1 - (shift)); \
})
/* Rd = Rn >> shift */
#define A64_LSR(sf, Rd, Rn, shift) A64_UBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
/* Rd = Rn >> shift; signed */
#define A64_ASR(sf, Rd, Rn, shift) A64_SBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
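
A64_LSL leans on the architectural alias LSL #shift == UBFM #(-shift mod sz), #(sz - 1 - shift). Worked through for a 64-bit shift by 8: immr = (unsigned)-8 % 64 = 56 and imms = 55, so the macro encodes UBFM Rd, Rn, #56, #55, which places bits 0..55 of Rn at bit positions 8..63 and zero-fills the rest:

	emit(A64_LSL(1, dst, dst, 8), ctx);	/* UBFM dst, dst, #56, #55 */
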
/* Zero extend */
#define A64_UXTH(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 15)
#define A64_UXTW(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 31)

/* Sign extend */
#define A64_SXTB(sf, Rd, Rn) A64_SBFM(sf, Rd, Rn, 0, 7)
#define A64_SXTH(sf, Rd, Rn) A64_SBFM(sf, Rd, Rn, 0, 15)
#define A64_SXTW(sf, Rd, Rn) A64_SBFM(sf, Rd, Rn, 0, 31)

/* Move wide (immediate) */
#define A64_MOVEW(sf, Rd, imm16, shift, type) \
	aarch64_insn_gen_movewide(Rd, imm16, shift, \
		A64_VARIANT(sf), AARCH64_INSN_MOVEWIDE_##type)
/* Rd = ~(imm16 << shift) */
#define A64_MOVN(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, INVERSE)
/* Rd = imm16 << shift */
#define A64_MOVZ(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, ZERO)
/* Rd[shift:shift+16] = imm16 */
#define A64_MOVK(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, KEEP)
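
Arbitrary constants are assembled 16 bits at a time: one MOVZ (or MOVN, when the inverted value needs fewer chunks) followed by MOVKs for the remaining halfwords. A sketch for loading 0x12345678 into a 32-bit register:

	emit(A64_MOVZ(0, dst, 0x5678, 0), ctx);		/* dst = 0x00005678 */
	emit(A64_MOVK(0, dst, 0x1234, 16), ctx);	/* dst = 0x12345678 */
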

/* Add/subtract (shifted register) */
#define A64_ADDSUB_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_add_sub_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP Rm */
#define A64_ADD(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
#define A64_SUB(sf, Rd, Rn, Rm)  A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB)
#define A64_SUBS(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB_SETFLAGS)
/* Rd = -Rm */
#define A64_NEG(sf, Rd, Rm) A64_SUB(sf, Rd, A64_ZR, Rm)
/* Rn - Rm; set condition flags */
#define A64_CMP(sf, Rn, Rm) A64_SUBS(sf, A64_ZR, Rn, Rm)
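
BPF conditional jumps compare first and then branch on the flags; a sketch using A64_B_ and A64_COND_NE, which the same header defines but this listing does not show:

	emit(A64_CMP(is64, dst, src), ctx);
	emit(A64_B_(A64_COND_NE, jmp_offset), ctx);	/* branch if dst != src */
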
/* Data-processing (1 source) */
#define A64_DATA1(sf, Rd, Rn, type) aarch64_insn_gen_data1(Rd, Rn, \
	A64_VARIANT(sf), AARCH64_INSN_DATA1_##type)
/* Rd = BSWAPx(Rn) */
#define A64_REV16(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_16)
#define A64_REV32(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_32)
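
These serve the BPF endianness-swap ops; a 16-bit byte swap, for example, needs a REV16 followed by a zero-extend of the low halfword (sketch):

	emit(A64_REV16(is64, dst, dst), ctx);
	emit(A64_UXTH(is64, dst, dst), ctx);	/* keep only the low 16 bits */
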

/* Data-processing (2 source) */
/* Rd = Rn OP Rm */
#define A64_DATA2(sf, Rd, Rn, Rm, type) aarch64_insn_gen_data2(Rd, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA2_##type)
#define A64_UDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, UDIV)
#define A64_SDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, SDIV)
#define A64_LSLV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSLV)
#define A64_LSRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSRV)
#define A64_ASRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, ASRV)

/* Data-processing (3 source) */
/* Rd = Ra + Rn * Rm */
#define A64_MADD(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MADD)
/* Rd = Ra - Rn * Rm */
#define A64_MSUB(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MSUB)
/* Rd = Rn * Rm */
#define A64_MUL(sf, Rd, Rn, Rm) A64_MADD(sf, Rd, A64_ZR, Rn, Rm)
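
AArch64 has no remainder instruction, so a modulo is emitted as a divide plus a multiply-subtract (sketch, tmp being a scratch register):

	emit(A64_UDIV(is64, tmp, dst, src), ctx);	/* tmp = dst / src */
	emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);	/* dst -= tmp * src */
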
/* Logical (shifted register) */
#define A64_LOGIC_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_logical_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_LOGIC_##type)
/* Rd = Rn OP Rm */
#define A64_AND(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND)
#define A64_ORR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, ORR)
#define A64_EOR(sf, Rd, Rn, Rm)  A64_LOGIC_SREG(sf, Rd, Rn, Rm, EOR)
#define A64_ANDS(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND_SETFLAGS)
/* Rn & Rm; set condition flags */
#define A64_TST(sf, Rn, Rm) A64_ANDS(sf, A64_ZR, Rn, Rm)
/* Rd = ~Rm (alias of ORN with A64_ZR as Rn) */
#define A64_MVN(sf, Rd, Rm) \
	A64_LOGIC_SREG(sf, Rd, A64_ZR, Rm, ORN)

/* Logical (immediate) */
#define A64_LOGIC_IMM(sf, Rd, Rn, imm, type) ({ \
	u64 imm64 = (sf) ? (u64)imm : (u64)(u32)imm; \
	aarch64_insn_gen_logical_immediate(AARCH64_INSN_LOGIC_##type, \
		A64_VARIANT(sf), Rn, Rd, imm64); \
})
/* Rd = Rn OP imm */
#define A64_AND_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND)
#define A64_ORR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, ORR)
#define A64_EOR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, EOR)
#define A64_ANDS_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND_SETFLAGS)
/* Rn & imm; set condition flags */
#define A64_TST_I(sf, Rn, imm) A64_ANDS_I(sf, A64_ZR, Rn, imm)
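
Only bitmask immediates (replicated run-of-ones patterns) are encodable here, and aarch64_insn_gen_logical_immediate() yields AARCH64_BREAK_FAULT for anything else, so an emitter can probe the returned encoding and fall back to a register operand. A sketch, reusing the assumed helpers from above:

	u32 a64_insn = A64_AND_I(is64, dst, dst, imm);
	if (a64_insn != AARCH64_BREAK_FAULT) {
		emit(a64_insn, ctx);
	} else {
		/* not a valid bitmask immediate: materialize, then AND */
		emit_a64_mov_i(is64, tmp, imm, ctx);
		emit(A64_AND(is64, dst, dst, tmp), ctx);
	}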