Lines matching refs:ctx in the arm64 eBPF JIT (arch/arm64/net/bpf_jit_comp.c):

93 static inline void emit(const u32 insn, struct jit_ctx *ctx)
95 if (ctx->image != NULL)
96 ctx->image[ctx->idx] = cpu_to_le32(insn);
98 ctx->idx++;
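
The emit() helper at line 93 is the heart of the JIT's two-pass design: during the sizing pass ctx->image is NULL and only ctx->idx advances; during the emission pass each instruction is byte-swapped to little-endian and stored. A minimal standalone sketch of the pattern (simplified jit_ctx, userspace allocation; it mirrors the matched lines but is not the kernel code):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct jit_ctx {
	uint32_t *image;	/* NULL during the sizing pass */
	int idx;		/* next instruction slot */
};

static void emit(uint32_t insn, struct jit_ctx *ctx)
{
	if (ctx->image != NULL)
		ctx->image[ctx->idx] = insn;	/* kernel also applies cpu_to_le32() */
	ctx->idx++;
}

int main(void)
{
	struct jit_ctx ctx = { .image = NULL, .idx = 0 };

	/* Pass 1: image is NULL, so emit() only counts. */
	emit(0xd503201f, &ctx);		/* NOP */
	emit(0xd65f03c0, &ctx);		/* RET */

	/* Allocate exactly what pass 1 measured, then emit for real. */
	ctx.image = malloc(sizeof(uint32_t) * ctx.idx);
	if (!ctx.image)
		return 1;
	ctx.idx = 0;
	emit(0xd503201f, &ctx);
	emit(0xd65f03c0, &ctx);

	printf("emitted %d instructions\n", ctx.idx);
	free(ctx.image);
	return 0;
}
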
102 const s32 val, struct jit_ctx *ctx)
109 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);
111 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);
113 emit(A64_MOVK(is64, reg, lo, 0), ctx);
116 emit(A64_MOVZ(is64, reg, lo, 0), ctx);
118 emit(A64_MOVK(is64, reg, hi, 16), ctx);
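
emit_a64_mov_i() (lines 102-118) loads a 32-bit immediate in at most two instructions: values whose top halfword sign-extends start from all-ones via MOVN, everything else starts from MOVZ with an optional MOVK for the high half. A self-contained sketch of the selection logic, printing mnemonics instead of emitting (reconstructed from the matched lines, so treat the details as approximate):

#include <stdint.h>
#include <stdio.h>

static void mov_i32(uint32_t val)
{
	uint16_t lo = val & 0xffff;
	uint16_t hi = val >> 16;

	if (hi & 0x8000) {		/* negative when sign-extended */
		if (hi == 0xffff) {
			/* all-ones top half: a single MOVN suffices */
			printf("movn w0, #0x%x\n", (uint16_t)~lo);
		} else {
			printf("movn w0, #0x%x, lsl #16\n", (uint16_t)~hi);
			if (lo != 0xffff)
				printf("movk w0, #0x%x\n", lo);
		}
	} else {
		printf("movz w0, #0x%x\n", lo);
		if (hi)
			printf("movk w0, #0x%x, lsl #16\n", hi);
	}
}

int main(void)
{
	mov_i32(0x12345678);	/* movz + movk */
	mov_i32(0xffffff85);	/* single movn (loads ~0x7a) */
	return 0;
}
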
131 struct jit_ctx *ctx)
138 return emit_a64_mov_i(0, reg, (u32)val, ctx);
144 emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx);
146 emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx);
150 emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx);
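
emit_a64_mov_i64() (lines 131-150) generalizes this to 64 bits: values that fit in 32 bits fall back to emit_a64_mov_i(); otherwise it decides between a MOVZ-based and a MOVN-based sequence by which one saves more MOVKs, then patches in only the halfwords that differ from the base pattern. A simplified sketch of the idea (it always bases at bit 0, unlike the kernel, which bases at the highest differing halfword):

#include <stdint.h>
#include <stdio.h>

static int count_halfwords(uint64_t v, uint16_t pat)
{
	int n = 0;
	for (int s = 0; s < 64; s += 16)
		n += ((uint16_t)(v >> s) == pat);
	return n;
}

static void mov_i64(uint64_t val)
{
	/* MOVN sets every bit outside the written halfword, so halfwords
	 * already 0xffff come for free; MOVZ gives free 0x0000 halfwords. */
	uint16_t skip = count_halfwords(val, 0xffff) >
			count_halfwords(val, 0x0000) ? 0xffff : 0x0000;

	for (int s = 0; s < 64; s += 16) {
		uint16_t hw = val >> s;

		if (s == 0) {
			if (skip == 0xffff)
				printf("movn x0, #0x%x\n", (uint16_t)~hw);
			else
				printf("movz x0, #0x%x\n", hw);
		} else if (hw != skip) {
			printf("movk x0, #0x%x, lsl #%d\n", hw, s);
		}
	}
}

int main(void)
{
	mov_i64(0xffffffffffff1234ULL);	/* one movn */
	mov_i64(0x0000123400005678ULL);	/* movz + one movk */
	return 0;
}
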
155 static inline void emit_bti(u32 insn, struct jit_ctx *ctx)
158 emit(insn, ctx);
167 struct jit_ctx *ctx)
172 emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx);
176 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);
180 static inline void emit_call(u64 target, struct jit_ctx *ctx)
184 emit_addr_mov_i64(tmp, target, ctx);
185 emit(A64_BLR(tmp), ctx);
189 const struct jit_ctx *ctx)
198 return ctx->offset[bpf_insn + off] - (ctx->offset[bpf_insn] - 1);
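
bpf2a64_offset() (lines 189-198) converts a BPF jump into an A64 branch offset using the per-instruction table that build_body fills in. Two conventions collide here: BPF offsets are relative to the instruction after the jump (the function does bpf_insn++ before this line), while A64 branches are relative to the branch instruction itself, which is the last A64 slot emitted for the jump, hence the "- 1". A worked example with made-up table contents:

#include <stdio.h>

/* offset[i] = A64 slot where BPF insn i's code begins (values invented) */
static const int offset[] = { 0, 2, 5, 8, 10, 12, 15, 18, 20 };

static int bpf2a64_offset(int bpf_insn, int off)
{
	bpf_insn++;	/* BPF jumps are relative to the next insn */
	return offset[bpf_insn + off] - (offset[bpf_insn] - 1);
}

int main(void)
{
	/* Jump at BPF insn 4 with off = +3: the branch occupies A64 slot
	 * offset[5] - 1 = 11 and the target is slot offset[8] = 20, so
	 * the branch must cover 20 - 11 = 9 instruction slots. */
	printf("%d\n", bpf2a64_offset(4, 3));	/* prints 9 */
	return 0;
}
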
217 static inline int epilogue_offset(const struct jit_ctx *ctx)
219 int to = ctx->epilogue_offset;
220 int from = ctx->idx;
297 static int build_prologue(struct jit_ctx *ctx, bool ebpf_from_cbpf,
300 const struct bpf_prog *prog = ctx->prog;
309 const int idx0 = ctx->idx;
326 * current A64_SP => +-----+ <= (BPF_FP - ctx->stack_size)
341 emit_bti(A64_BTI_JC, ctx);
343 emit(A64_MOV(1, A64_R(9), A64_LR), ctx);
344 emit(A64_NOP, ctx);
349 emit(A64_PACIASP, ctx);
351 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx);
352 emit(A64_MOV(1, A64_FP, A64_SP), ctx);
355 emit(A64_PUSH(r6, r7, A64_SP), ctx);
356 emit(A64_PUSH(r8, r9, A64_SP), ctx);
357 emit(A64_PUSH(fp, tcc, A64_SP), ctx);
358 emit(A64_PUSH(fpb, A64_R(28), A64_SP), ctx);
364 emit(A64_MOV(1, A64_FP, A64_R(2)), ctx);
372 emit(A64_SUB_I(1, A64_SP, A64_FP, 80), ctx);
376 emit(A64_MOV(1, fp, A64_SP), ctx);
380 emit(A64_MOVZ(1, tcc, 0, 0), ctx);
382 cur_offset = ctx->idx - idx0;
390 emit_bti(A64_BTI_J, ctx);
403 emit(A64_SUB_I(1, fp, fp, 16), ctx);
404 emit(A64_PUSH(A64_R(23), A64_R(24), A64_SP), ctx);
407 emit(A64_SUB_I(1, fpb, fp, ctx->fpb_offset), ctx);
410 ctx->stack_size = round_up(prog->aux->stack_depth, 16);
413 emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
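
build_prologue() (lines 297-413) saves the callee-saved pairs, sets up the BPF frame pointer and the tail-call counter tcc, and reserves ctx->stack_size bytes. The cur_offset bookkeeping at line 382 exists because tail calls re-enter the next program just past its prologue, so the prologue must be a fixed number of instructions; the kernel fails the build if the count ever drifts. Illustrative check (the real PROLOGUE_OFFSET is a build-time constant derived from the BTI/PAC configuration):

#define PROLOGUE_OFFSET 11	/* made-up value for the sketch */

static int check_prologue_size(int idx, int idx0)
{
	int cur_offset = idx - idx0;

	/* kernel: pr_err_once() and abort the JIT on mismatch */
	return (cur_offset == PROLOGUE_OFFSET) ? 0 : -1;
}
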
418 static int emit_bpf_tail_call(struct jit_ctx *ctx)
427 const int idx0 = ctx->idx;
428 #define cur_offset (ctx->idx - idx0)
436 emit_a64_mov_i64(tmp, off, ctx);
437 emit(A64_LDR32(tmp, r2, tmp), ctx);
438 emit(A64_MOV(0, r3, r3), ctx);
439 emit(A64_CMP(0, r3, tmp), ctx);
440 emit(A64_B_(A64_COND_CS, jmp_offset), ctx);
447 emit_a64_mov_i64(tmp, MAX_TAIL_CALL_CNT, ctx);
448 emit(A64_CMP(1, tcc, tmp), ctx);
449 emit(A64_B_(A64_COND_CS, jmp_offset), ctx);
450 emit(A64_ADD_I(1, tcc, tcc, 1), ctx);
457 emit_a64_mov_i64(tmp, off, ctx);
458 emit(A64_ADD(1, tmp, r2, tmp), ctx);
459 emit(A64_LSL(1, prg, r3, 3), ctx);
460 emit(A64_LDR64(prg, tmp, prg), ctx);
461 emit(A64_CBZ(1, prg, jmp_offset), ctx);
465 emit_a64_mov_i64(tmp, off, ctx);
466 emit(A64_LDR64(tmp, prg, tmp), ctx);
467 emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx);
468 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
469 emit(A64_BR(tmp), ctx);
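
emit_bpf_tail_call() (lines 418-469) emits three guards before the indirect jump: an index bounds check against the prog array's max_entries, a depth check against MAX_TAIL_CALL_CNT, and a NULL check on the fetched program; if all pass, it unwinds the stack and branches into the target program just past its prologue. The same logic as plain C (a behavioral sketch, not the emitted instructions; struct fields simplified):

#include <stdint.h>
#include <stddef.h>

#define MAX_TAIL_CALL_CNT 33	/* BPF's depth limit (33 in recent kernels) */

struct fake_prog_array {
	uint32_t max_entries;
	void *prog_start[16];	/* per-slot entry points, illustrative */
};

static void *tail_call_target(struct fake_prog_array *arr, uint64_t index,
			      uint64_t *tcc, size_t prologue_bytes)
{
	/* the index is zero-extended to 32 bits first (MOV w3, w3) */
	if ((uint32_t)index >= arr->max_entries)	/* CMP + B.CS out */
		return NULL;
	if (*tcc >= MAX_TAIL_CALL_CNT)			/* CMP tcc + B.CS out */
		return NULL;
	(*tcc)++;					/* ADD tcc, tcc, #1 */

	void *prog = arr->prog_start[index];
	if (!prog)					/* CBZ out */
		return NULL;

	/* ADD tmp, tmp, #(PROLOGUE_OFFSET * 4); BR tmp */
	return (char *)prog + prologue_bytes;
}
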
485 static int emit_lse_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx)
499 emit_a64_mov_i(1, tmp, off, ctx);
500 emit(A64_ADD(1, tmp, tmp, dst), ctx);
507 emit(A64_STADD(isdw, reg, src), ctx);
510 emit(A64_MVN(isdw, tmp2, src), ctx);
511 emit(A64_STCLR(isdw, reg, tmp2), ctx);
514 emit(A64_STSET(isdw, reg, src), ctx);
517 emit(A64_STEOR(isdw, reg, src), ctx);
521 emit(A64_LDADDAL(isdw, src, reg, src), ctx);
524 emit(A64_MVN(isdw, tmp2, src), ctx);
525 emit(A64_LDCLRAL(isdw, src, reg, tmp2), ctx);
528 emit(A64_LDSETAL(isdw, src, reg, src), ctx);
531 emit(A64_LDEORAL(isdw, src, reg, src), ctx);
535 emit(A64_SWPAL(isdw, src, reg, src), ctx);
539 emit(A64_CASAL(isdw, src, reg, bpf2a64[BPF_REG_0]), ctx);
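
emit_lse_atomic() (lines 485-539) maps each BPF atomic onto a single LSE instruction: STADD/STCLR/STSET/STEOR for the no-fetch forms, LDADDAL/LDCLRAL/LDSETAL/LDEORAL for the fetch forms, SWPAL for XCHG, and CASAL for CMPXCHG (the old value lands in BPF_REG_0). AND has no direct LSE op, so src is inverted with MVN and the clear instruction drops those bits. What the variants amount to in C11 atomics (an analogue, not the kernel code; the AL suffix means acquire+release, approximated here as seq_cst):

#include <stdatomic.h>
#include <stdint.h>

/* BPF_ADD (no fetch) -> STADD: no result, relaxed ordering */
static void lse_add(_Atomic uint64_t *p, uint64_t v)
{
	atomic_fetch_add_explicit(p, v, memory_order_relaxed);
}

/* BPF_ADD | BPF_FETCH -> LDADDAL: returns the old value, fully ordered */
static uint64_t lse_fetch_add(_Atomic uint64_t *p, uint64_t v)
{
	return atomic_fetch_add_explicit(p, v, memory_order_seq_cst);
}

/* BPF_AND -> MVN + STCLR: atomic AND built from atomic bit-clear */
static void lse_and(_Atomic uint64_t *p, uint64_t v)
{
	atomic_fetch_and_explicit(p, v, memory_order_relaxed);
}

/* BPF_XCHG -> SWPAL; BPF_CMPXCHG -> CASAL */
static uint64_t lse_xchg(_Atomic uint64_t *p, uint64_t v)
{
	return atomic_exchange_explicit(p, v, memory_order_seq_cst);
}

static uint64_t lse_cmpxchg(_Atomic uint64_t *p, uint64_t old, uint64_t new)
{
	atomic_compare_exchange_strong_explicit(p, &old, new,
						memory_order_seq_cst,
						memory_order_seq_cst);
	return old;	/* the value observed, as CASAL leaves it in r0 */
}
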
549 static inline int emit_lse_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx)
555 static int emit_ll_sc_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx)
563 const int i = insn - ctx->prog->insnsi;
573 emit_a64_mov_i(1, tmp, off, ctx);
574 emit(A64_ADD(1, tmp, tmp, dst), ctx);
581 emit(A64_LDXR(isdw, tmp2, reg), ctx);
583 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx);
585 emit(A64_AND(isdw, tmp2, tmp2, src), ctx);
587 emit(A64_ORR(isdw, tmp2, tmp2, src), ctx);
589 emit(A64_EOR(isdw, tmp2, tmp2, src), ctx);
590 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx);
593 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
601 emit(A64_MOV(isdw, ax, src), ctx);
602 emit(A64_LDXR(isdw, src, reg), ctx);
604 emit(A64_ADD(isdw, tmp2, src, ax), ctx);
606 emit(A64_AND(isdw, tmp2, src, ax), ctx);
608 emit(A64_ORR(isdw, tmp2, src, ax), ctx);
610 emit(A64_EOR(isdw, tmp2, src, ax), ctx);
611 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx);
614 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
615 emit(A64_DMB_ISH, ctx);
618 emit(A64_MOV(isdw, tmp2, src), ctx);
619 emit(A64_LDXR(isdw, src, reg), ctx);
620 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx);
623 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
624 emit(A64_DMB_ISH, ctx);
629 emit(A64_MOV(isdw, tmp2, r0), ctx);
630 emit(A64_LDXR(isdw, r0, reg), ctx);
631 emit(A64_EOR(isdw, tmp3, r0, tmp2), ctx);
634 emit(A64_CBNZ(isdw, tmp3, jmp_offset), ctx);
635 emit(A64_STLXR(isdw, src, reg, tmp3), ctx);
638 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
639 emit(A64_DMB_ISH, ctx);
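
emit_ll_sc_atomic() (lines 555-639) is the fallback for cores without LSE: LDXR takes the exclusive monitor, the ALU op runs on the loaded value, STXR/STLXR attempts the store and writes a status flag, and CBNZ on the flag retries the whole sequence; the fetch, xchg and cmpxchg variants finish with DMB ISH for full ordering. The same retry structure as a C11 weak compare-exchange loop (an analogue, not the emitted instructions):

#include <stdatomic.h>
#include <stdint.h>

static uint64_t ll_sc_fetch_add(_Atomic uint64_t *p, uint64_t v)
{
	uint64_t old = atomic_load_explicit(p, memory_order_relaxed);

	/* LDXR ... ADD ... STLXR status; CBNZ status, retry */
	while (!atomic_compare_exchange_weak_explicit(p, &old, old + v,
						      memory_order_acq_rel,
						      memory_order_relaxed))
		;	/* on failure 'old' is reloaded, like re-running LDXR */

	atomic_thread_fence(memory_order_seq_cst);	/* the DMB ISH analogue */
	return old;
}
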
677 static void build_plt(struct jit_ctx *ctx)
683 if ((ctx->idx + PLT_TARGET_OFFSET / AARCH64_INSN_SIZE) % 2)
684 emit(A64_NOP, ctx);
686 plt = (struct bpf_plt *)(ctx->image + ctx->idx);
688 emit(A64_LDR64LIT(tmp, 2 * AARCH64_INSN_SIZE), ctx);
689 emit(A64_BR(tmp), ctx);
691 if (ctx->image)
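
build_plt() (lines 677-691) appends a small PLT at the end of the image so out-of-range call targets (for instance an attached trampoline) can be reached and repatched later: a literal load from a slot two instructions ahead, an indirect branch, then the 8-byte target itself; the leading NOP check keeps that slot 8-byte aligned so it can be patched with a single store. Layout per my reading of the matched lines:

#include <stdint.h>

struct bpf_plt {
	uint32_t insn_ldr;	/* LDR x10, <literal 2 insns ahead> */
	uint32_t insn_br;	/* BR  x10 */
	uint64_t target;	/* patched at attach time; must stay aligned */
};
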
695 static void build_epilogue(struct jit_ctx *ctx, bool is_exception_cb)
706 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
714 if (ctx->prog->aux->exception_boundary || is_exception_cb)
715 emit(A64_POP(A64_R(23), A64_R(24), A64_SP), ctx);
718 emit(A64_POP(fpb, A64_R(28), A64_SP), ctx);
720 emit(A64_POP(fp, A64_R(26), A64_SP), ctx);
723 emit(A64_POP(r8, r9, A64_SP), ctx);
724 emit(A64_POP(r6, r7, A64_SP), ctx);
727 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);
730 emit(A64_MOV(1, A64_R(0), r0), ctx);
734 emit(A64_AUTIASP, ctx);
736 emit(A64_RET(A64_LR), ctx);
755 struct jit_ctx *ctx,
763 if (!ctx->image)
771 if (!ctx->prog->aux->extable ||
772 WARN_ON_ONCE(ctx->exentry_idx >= ctx->prog->aux->num_exentries))
775 ex = &ctx->prog->aux->extable[ctx->exentry_idx];
776 pc = (unsigned long)&ctx->ro_image[ctx->idx - 1];
809 ex = (void *)ctx->image + ((void *)ex - (void *)ctx->ro_image);
818 ctx->exentry_idx++;
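
add_exception_handler() (lines 755-818) runs only in the emission pass and only for loads/stores that are allowed to fault (the BPF_PROBE_MEM family): it appends an exception-table entry tying the just-emitted instruction's address in the read-only image (ctx->idx - 1) to fixup data naming the destination register, so the fault handler can zero dst and resume past the access. Conceptual sketch with simplified fields (the real arm64 entry packs PC-relative offsets and a data word):

#include <stddef.h>

struct fake_exentry {
	unsigned long insn_pc;	/* address of the faulting access */
	unsigned long fixup_pc;	/* where execution resumes */
	int dst_reg;		/* register the handler zeroes */
};

/* Fault path, roughly: match the faulting PC, zero dst_reg in the saved
 * register file, and continue at fixup_pc. */
static const struct fake_exentry *
search_extable(const struct fake_exentry *tbl, int n, unsigned long pc)
{
	for (int i = 0; i < n; i++)
		if (tbl[i].insn_pc == pc)
			return &tbl[i];
	return NULL;
}
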
828 static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
840 const int i = insn - ctx->prog->insnsi;
858 emit(A64_MOV(is64, dst, src), ctx);
861 emit(A64_SXTB(is64, dst, src), ctx);
864 emit(A64_SXTH(is64, dst, src), ctx);
867 emit(A64_SXTW(is64, dst, src), ctx);
874 emit(A64_ADD(is64, dst, dst, src), ctx);
878 emit(A64_SUB(is64, dst, dst, src), ctx);
882 emit(A64_AND(is64, dst, dst, src), ctx);
886 emit(A64_ORR(is64, dst, dst, src), ctx);
890 emit(A64_EOR(is64, dst, dst, src), ctx);
894 emit(A64_MUL(is64, dst, dst, src), ctx);
899 emit(A64_UDIV(is64, dst, dst, src), ctx);
901 emit(A64_SDIV(is64, dst, dst, src), ctx);
906 emit(A64_UDIV(is64, tmp, dst, src), ctx);
908 emit(A64_SDIV(is64, tmp, dst, src), ctx);
909 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);
913 emit(A64_LSLV(is64, dst, dst, src), ctx);
917 emit(A64_LSRV(is64, dst, dst, src), ctx);
921 emit(A64_ASRV(is64, dst, dst, src), ctx);
926 emit(A64_NEG(is64, dst, dst), ctx);
941 emit(A64_REV16(is64, dst, dst), ctx);
943 emit(A64_UXTH(is64, dst, dst), ctx);
946 emit(A64_REV32(0, dst, dst), ctx);
950 emit(A64_REV64(dst, dst), ctx);
958 emit(A64_UXTH(is64, dst, dst), ctx);
962 emit(A64_UXTW(is64, dst, dst), ctx);
972 emit_a64_mov_i(is64, dst, imm, ctx);
978 emit(A64_ADD_I(is64, dst, dst, imm), ctx);
980 emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
982 emit_a64_mov_i(is64, tmp, imm, ctx);
983 emit(A64_ADD(is64, dst, dst, tmp), ctx);
989 emit(A64_SUB_I(is64, dst, dst, imm), ctx);
991 emit(A64_ADD_I(is64, dst, dst, -imm), ctx);
993 emit_a64_mov_i(is64, tmp, imm, ctx);
994 emit(A64_SUB(is64, dst, dst, tmp), ctx);
1001 emit(a64_insn, ctx);
1003 emit_a64_mov_i(is64, tmp, imm, ctx);
1004 emit(A64_AND(is64, dst, dst, tmp), ctx);
1011 emit(a64_insn, ctx);
1013 emit_a64_mov_i(is64, tmp, imm, ctx);
1014 emit(A64_ORR(is64, dst, dst, tmp), ctx);
1021 emit(a64_insn, ctx);
1023 emit_a64_mov_i(is64, tmp, imm, ctx);
1024 emit(A64_EOR(is64, dst, dst, tmp), ctx);
1029 emit_a64_mov_i(is64, tmp, imm, ctx);
1030 emit(A64_MUL(is64, dst, dst, tmp), ctx);
1034 emit_a64_mov_i(is64, tmp, imm, ctx);
1036 emit(A64_UDIV(is64, dst, dst, tmp), ctx);
1038 emit(A64_SDIV(is64, dst, dst, tmp), ctx);
1042 emit_a64_mov_i(is64, tmp2, imm, ctx);
1044 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx);
1046 emit(A64_SDIV(is64, tmp, dst, tmp2), ctx);
1047 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx);
1051 emit(A64_LSL(is64, dst, dst, imm), ctx);
1055 emit(A64_LSR(is64, dst, dst, imm), ctx);
1059 emit(A64_ASR(is64, dst, dst, imm), ctx);
1066 jmp_offset = bpf2a64_offset(i, off, ctx);
1068 jmp_offset = bpf2a64_offset(i, imm, ctx);
1070 emit(A64_B(jmp_offset), ctx);
1093 emit(A64_CMP(is64, dst, src), ctx);
1095 jmp_offset = bpf2a64_offset(i, off, ctx);
1132 emit(A64_B_(jmp_cond, jmp_offset), ctx);
1136 emit(A64_TST(is64, dst, src), ctx);
1160 emit(A64_CMP_I(is64, dst, imm), ctx);
1162 emit(A64_CMN_I(is64, dst, -imm), ctx);
1164 emit_a64_mov_i(is64, tmp, imm, ctx);
1165 emit(A64_CMP(is64, dst, tmp), ctx);
1172 emit(a64_insn, ctx);
1174 emit_a64_mov_i(is64, tmp, imm, ctx);
1175 emit(A64_TST(is64, dst, tmp), ctx);
1185 ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass,
1189 emit_call(func_addr, ctx);
1190 emit(A64_MOV(1, r0, A64_R(0)), ctx);
1195 if (emit_bpf_tail_call(ctx))
1202 if (i == ctx->prog->len - 1)
1204 jmp_offset = epilogue_offset(ctx);
1206 emit(A64_B(jmp_offset), ctx);
1217 emit_addr_mov_i64(dst, imm64, ctx);
1219 emit_a64_mov_i64(dst, imm64, ctx);
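
A pattern repeats through the ALU-immediate cases above (lines 978-1047): try the instruction's native immediate encoding first, then the complementary instruction with the negated immediate, and only then materialize the constant in a scratch register (the logical ops similarly probe the bitmask encoder and fall back when it returns the break/fault poison value). Condensed for BPF_ADD | BPF_K, reconstructed from the matched lines (is_addsub_imm() is the kernel's check for the 12-bit, optionally shifted, add/sub immediate):

if (is_addsub_imm(imm))			/* fits ADD Xd, Xn, #imm12 */
	emit(A64_ADD_I(is64, dst, dst, imm), ctx);
else if (is_addsub_imm(-imm))		/* negated form fits: use SUB */
	emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
else {					/* arbitrary imm via scratch reg */
	emit_a64_mov_i(is64, tmp, imm, ctx);
	emit(A64_ADD(is64, dst, dst, tmp), ctx);
}
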
1240 if (ctx->fpb_offset > 0 && src == fp) {
1242 off_adj = off + ctx->fpb_offset;
1253 emit(A64_LDRSWI(dst, src_adj, off_adj), ctx);
1255 emit(A64_LDR32I(dst, src_adj, off_adj), ctx);
1257 emit_a64_mov_i(1, tmp, off, ctx);
1259 emit(A64_LDRSW(dst, src, tmp), ctx);
1261 emit(A64_LDR32(dst, src, tmp), ctx);
1267 emit(A64_LDRSHI(dst, src_adj, off_adj), ctx);
1269 emit(A64_LDRHI(dst, src_adj, off_adj), ctx);
1271 emit_a64_mov_i(1, tmp, off, ctx);
1273 emit(A64_LDRSH(dst, src, tmp), ctx);
1275 emit(A64_LDRH(dst, src, tmp), ctx);
1281 emit(A64_LDRSBI(dst, src_adj, off_adj), ctx);
1283 emit(A64_LDRBI(dst, src_adj, off_adj), ctx);
1285 emit_a64_mov_i(1, tmp, off, ctx);
1287 emit(A64_LDRSB(dst, src, tmp), ctx);
1289 emit(A64_LDRB(dst, src, tmp), ctx);
1294 emit(A64_LDR64I(dst, src_adj, off_adj), ctx);
1296 emit_a64_mov_i(1, tmp, off, ctx);
1297 emit(A64_LDR64(dst, src, tmp), ctx);
1302 ret = add_exception_handler(insn, ctx, dst);
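
The BPF_LDX cases above (lines 1240-1302) all follow one addressing pattern: if the access is FP-relative and find_fpb_offset() established a second base (fpb, a "frame pointer bottom" register sitting closer to the hot stack slots), rebase onto fpb so the adjusted offset fits the scaled unsigned-immediate form of LDR; otherwise, or when the offset still does not encode, materialize the offset in tmp and use the register-offset form. Condensed for the 64-bit load, reconstructed from the matched lines (is_lsi_offset() is the kernel's encodability check):

if (ctx->fpb_offset > 0 && src == fp) {
	src_adj = fpb;				/* closer base register */
	off_adj = off + ctx->fpb_offset;	/* rebased offset */
} else {
	src_adj = src;
	off_adj = off;
}
if (is_lsi_offset(off_adj, 3))			/* fits LDR Xd, [Xn, #imm]? */
	emit(A64_LDR64I(dst, src_adj, off_adj), ctx);
else {
	emit_a64_mov_i(1, tmp, off, ctx);	/* register-offset fallback */
	emit(A64_LDR64(dst, src, tmp), ctx);
}
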
1325 if (ctx->fpb_offset > 0 && dst == fp) {
1327 off_adj = off + ctx->fpb_offset;
1333 emit_a64_mov_i(1, tmp, imm, ctx);
1337 emit(A64_STR32I(tmp, dst_adj, off_adj), ctx);
1339 emit_a64_mov_i(1, tmp2, off, ctx);
1340 emit(A64_STR32(tmp, dst, tmp2), ctx);
1345 emit(A64_STRHI(tmp, dst_adj, off_adj), ctx);
1347 emit_a64_mov_i(1, tmp2, off, ctx);
1348 emit(A64_STRH(tmp, dst, tmp2), ctx);
1353 emit(A64_STRBI(tmp, dst_adj, off_adj), ctx);
1355 emit_a64_mov_i(1, tmp2, off, ctx);
1356 emit(A64_STRB(tmp, dst, tmp2), ctx);
1361 emit(A64_STR64I(tmp, dst_adj, off_adj), ctx);
1363 emit_a64_mov_i(1, tmp2, off, ctx);
1364 emit(A64_STR64(tmp, dst, tmp2), ctx);
1375 if (ctx->fpb_offset > 0 && dst == fp) {
1377 off_adj = off + ctx->fpb_offset;
1385 emit(A64_STR32I(src, dst_adj, off_adj), ctx);
1387 emit_a64_mov_i(1, tmp, off, ctx);
1388 emit(A64_STR32(src, dst, tmp), ctx);
1393 emit(A64_STRHI(src, dst_adj, off_adj), ctx);
1395 emit_a64_mov_i(1, tmp, off, ctx);
1396 emit(A64_STRH(src, dst, tmp), ctx);
1401 emit(A64_STRBI(src, dst_adj, off_adj), ctx);
1403 emit_a64_mov_i(1, tmp, off, ctx);
1404 emit(A64_STRB(src, dst, tmp), ctx);
1409 emit(A64_STR64I(src, dst_adj, off_adj), ctx);
1411 emit_a64_mov_i(1, tmp, off, ctx);
1412 emit(A64_STR64(src, dst, tmp), ctx);
1421 ret = emit_lse_atomic(insn, ctx);
1423 ret = emit_ll_sc_atomic(insn, ctx);
1509 static int build_body(struct jit_ctx *ctx, bool extra_pass)
1511 const struct bpf_prog *prog = ctx->prog;
1527 if (ctx->image == NULL)
1528 ctx->offset[i] = ctx->idx;
1529 ret = build_insn(insn, ctx, extra_pass);
1532 if (ctx->image == NULL)
1533 ctx->offset[i] = ctx->idx;
1544 if (ctx->image == NULL)
1545 ctx->offset[i] = ctx->idx;
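
build_body() (lines 1509-1545) drives both passes over the program; during the sizing pass (ctx->image == NULL) it records where each BPF instruction's code begins, which is exactly the table bpf2a64_offset() and the BPF line-info code consume. Condensed from the matched lines (a positive build_insn() return means BPF_LD | BPF_IMM | BPF_DW, which occupies two BPF slots):

for (i = 0; i < prog->len; i++) {
	const struct bpf_insn *insn = &prog->insnsi[i];

	if (ctx->image == NULL)
		ctx->offset[i] = ctx->idx;	/* start of insn i's code */
	ret = build_insn(insn, ctx, extra_pass);
	if (ret > 0) {				/* ld_imm64: mark 2nd slot too */
		i++;
		if (ctx->image == NULL)
			ctx->offset[i] = ctx->idx;
		continue;
	}
	if (ret)
		return ret;
}
/* one extra entry so offset[prog->len] brackets the end of the body */
if (ctx->image == NULL)
	ctx->offset[i] = ctx->idx;
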
1550 static int validate_code(struct jit_ctx *ctx)
1554 for (i = 0; i < ctx->idx; i++) {
1555 u32 a64_insn = le32_to_cpu(ctx->image[i]);
1563 static int validate_ctx(struct jit_ctx *ctx)
1565 if (validate_code(ctx))
1568 if (WARN_ON_ONCE(ctx->exentry_idx != ctx->prog->aux->num_exentries))
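
validate_code() scans every emitted word for AARCH64_BREAK_FAULT, the poison value the A64_* encoders return when asked for something unencodable, so an encoding bug fails the whole JIT instead of running garbage; validate_ctx() additionally checks that exactly the expected number of exception-table entries were produced. The loop body, reconstructed around the matched lines:

static int validate_code(struct jit_ctx *ctx)
{
	int i;

	for (i = 0; i < ctx->idx; i++) {
		u32 a64_insn = le32_to_cpu(ctx->image[i]);

		if (WARN_ON_ONCE(a64_insn == AARCH64_BREAK_FAULT))
			return -1;
	}
	return 0;
}
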
1583 struct jit_ctx ctx;
1596 struct jit_ctx ctx;
1623 if (jit_data->ctx.offset) {
1624 ctx = jit_data->ctx;
1631 prog_size = sizeof(u32) * ctx.idx;
1634 memset(&ctx, 0, sizeof(ctx));
1635 ctx.prog = prog;
1637 ctx.offset = kvcalloc(prog->len + 1, sizeof(int), GFP_KERNEL);
1638 if (ctx.offset == NULL) {
1643 ctx.fpb_offset = find_fpb_offset(prog);
1646 * 1. Initial fake pass to compute ctx->idx and ctx->offset.
1648 * BPF line info needs ctx->offset[i] to be the offset of
1651 if (build_prologue(&ctx, was_classic, prog->aux->exception_cb)) {
1656 if (build_body(&ctx, extra_pass)) {
1661 ctx.epilogue_offset = ctx.idx;
1662 build_epilogue(&ctx, prog->aux->exception_cb);
1663 build_plt(&ctx);
1670 prog_size = sizeof(u32) * ctx.idx;
1691 ctx.image = (__le32 *)image_ptr;
1692 ctx.ro_image = (__le32 *)ro_image_ptr;
1696 ctx.idx = 0;
1697 ctx.exentry_idx = 0;
1699 build_prologue(&ctx, was_classic, prog->aux->exception_cb);
1701 if (build_body(&ctx, extra_pass)) {
1706 build_epilogue(&ctx, prog->aux->exception_cb);
1707 build_plt(&ctx);
1710 if (validate_ctx(&ctx)) {
1717 bpf_jit_dump(prog->len, prog_size, 2, ctx.image);
1720 if (extra_pass && ctx.idx != jit_data->ctx.idx) {
1722 ctx.idx, jit_data->ctx.idx);
1740 bpf_flush_icache(ro_header, ctx.ro_image + ctx.idx);
1742 jit_data->ctx = ctx;
1748 prog->bpf_func = (void *)ctx.ro_image;
1757 ctx.offset[i] *= AARCH64_INSN_SIZE;
1758 bpf_prog_fill_jited_linfo(prog, ctx.offset + 1);
1760 kvfree(ctx.offset);
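
bpf_int_jit_compile() (lines 1583-1760) ties the passes together. Condensed control flow, reconstructed from the matched lines with error handling elided:

ctx.prog = prog;
ctx.offset = kvcalloc(prog->len + 1, sizeof(int), GFP_KERNEL);
ctx.fpb_offset = find_fpb_offset(prog);

/* Pass 1: image == NULL, computes ctx.idx (size) and ctx.offset[] */
build_prologue(&ctx, was_classic, prog->aux->exception_cb);
build_body(&ctx, extra_pass);
ctx.epilogue_offset = ctx.idx;
build_epilogue(&ctx, prog->aux->exception_cb);
build_plt(&ctx);

prog_size = sizeof(u32) * ctx.idx;	/* size the ro_image allocation */

/* Pass 2: same emitters, now writing into the buffers */
ctx.image = (__le32 *)image_ptr;
ctx.ro_image = (__le32 *)ro_image_ptr;
ctx.idx = 0;
ctx.exentry_idx = 0;
build_prologue(&ctx, was_classic, prog->aux->exception_cb);
build_body(&ctx, extra_pass);
build_epilogue(&ctx, prog->aux->exception_cb);
build_plt(&ctx);

/* Reject poisoned encodings, sync caches, publish the entry point */
validate_ctx(&ctx);
bpf_flush_icache(ro_header, ctx.ro_image + ctx.idx);
prog->bpf_func = (void *)ctx.ro_image;
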
1813 static void invoke_bpf_prog(struct jit_ctx *ctx, struct bpf_tramp_link *l,
1828 emit(A64_STR64I(A64_ZR, A64_SP, run_ctx_off + cookie_off), ctx);
1830 emit_a64_mov_i64(A64_R(10), l->cookie, ctx);
1832 ctx);
1838 emit_addr_mov_i64(A64_R(19), (const u64)p, ctx);
1841 emit(A64_MOV(1, A64_R(0), A64_R(19)), ctx);
1843 emit(A64_ADD_I(1, A64_R(1), A64_SP, run_ctx_off), ctx);
1845 emit_call(enter_prog, ctx);
1848 emit(A64_MOV(1, A64_R(20), A64_R(0)), ctx);
1853 branch = ctx->image + ctx->idx;
1854 emit(A64_NOP, ctx);
1856 emit(A64_ADD_I(1, A64_R(0), A64_SP, args_off), ctx);
1858 emit_addr_mov_i64(A64_R(1), (const u64)p->insnsi, ctx);
1860 emit_call((const u64)p->bpf_func, ctx);
1863 emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx);
1865 if (ctx->image) {
1866 int offset = &ctx->image[ctx->idx] - branch;
1871 emit(A64_MOV(1, A64_R(0), A64_R(19)), ctx);
1873 emit(A64_MOV(1, A64_R(1), A64_R(20)), ctx);
1875 emit(A64_ADD_I(1, A64_R(2), A64_SP, run_ctx_off), ctx);
1877 emit_call(exit_prog, ctx);
1880 static void invoke_bpf_mod_ret(struct jit_ctx *ctx, struct bpf_tramp_links *tl,
1889 emit(A64_STR64I(A64_ZR, A64_SP, retval_off), ctx);
1891 invoke_bpf_prog(ctx, tl->links[i], args_off, retval_off,
1896 emit(A64_LDR64I(A64_R(10), A64_SP, retval_off), ctx);
1900 branches[i] = ctx->image + ctx->idx;
1901 emit(A64_NOP, ctx);
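
invoke_bpf_prog() and invoke_bpf_mod_ret() (lines 1813-1901) share a backpatching idiom for forward branches whose target is not yet known: remember the current slot, emit a NOP placeholder, and once the landing point is reached, and only in the emission pass, rewrite the slot with the real conditional branch. The idiom, extracted from the matched lines (here branching over the program call when bpf_prog_enter's result in x20 is zero):

__le32 *branch = ctx->image + ctx->idx;	/* remember the slot */
emit(A64_NOP, ctx);			/* placeholder */

/* ... emit the code to be skipped when x20 == 0 ... */

if (ctx->image) {			/* pass 2 only: patch it in */
	int offset = &ctx->image[ctx->idx] - branch;	/* in instructions */
	*branch = cpu_to_le32(A64_CBZ(1, A64_R(20), offset));
}
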
1905 static void save_args(struct jit_ctx *ctx, int args_off, int nregs)
1910 emit(A64_STR64I(i, A64_SP, args_off), ctx);
1915 static void restore_args(struct jit_ctx *ctx, int args_off, int nregs)
1920 emit(A64_LDR64I(i, A64_SP, args_off), ctx);
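
save_args()/restore_args() spill the traced function's first nregs argument registers (x0..x(nregs-1)) to consecutive 8-byte stack slots and reload them before BPF_TRAMP_F_CALL_ORIG calls the original. The loop shape, reconstructed from the matched lines:

for (i = 0; i < nregs; i++) {
	emit(A64_STR64I(i, A64_SP, args_off), ctx);	/* save_args */
	args_off += 8;
}
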
1936 static int prepare_trampoline(struct jit_ctx *ctx, struct bpf_tramp_image *im,
2020 emit_bti(A64_BTI_JC, ctx);
2023 emit(A64_PUSH(A64_FP, A64_R(9), A64_SP), ctx);
2024 emit(A64_MOV(1, A64_FP, A64_SP), ctx);
2027 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx);
2028 emit(A64_MOV(1, A64_FP, A64_SP), ctx);
2031 emit(A64_SUB_I(1, A64_SP, A64_SP, stack_size), ctx);
2035 emit_addr_mov_i64(A64_R(10), (const u64)func_addr, ctx);
2036 emit(A64_STR64I(A64_R(10), A64_SP, ip_off), ctx);
2040 emit(A64_MOVZ(1, A64_R(10), nregs, 0), ctx);
2041 emit(A64_STR64I(A64_R(10), A64_SP, nregs_off), ctx);
2044 save_args(ctx, args_off, nregs);
2047 emit(A64_STR64I(A64_R(19), A64_SP, regs_off), ctx);
2048 emit(A64_STR64I(A64_R(20), A64_SP, regs_off + 8), ctx);
2051 emit_addr_mov_i64(A64_R(0), (const u64)im, ctx);
2052 emit_call((const u64)__bpf_tramp_enter, ctx);
2056 invoke_bpf_prog(ctx, fentry->links[i], args_off,
2066 invoke_bpf_mod_ret(ctx, fmod_ret, args_off, retval_off,
2071 restore_args(ctx, args_off, nregs);
2073 emit(A64_LDR64I(A64_R(10), A64_SP, retaddr_off), ctx);
2074 emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx);
2075 emit(A64_RET(A64_R(10)), ctx);
2077 emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx);
2079 im->ip_after_call = ctx->ro_image + ctx->idx;
2080 emit(A64_NOP, ctx);
2084 for (i = 0; i < fmod_ret->nr_links && ctx->image != NULL; i++) {
2085 int offset = &ctx->image[ctx->idx] - branches[i];
2090 invoke_bpf_prog(ctx, fexit->links[i], args_off, retval_off,
2094 im->ip_epilogue = ctx->ro_image + ctx->idx;
2095 emit_addr_mov_i64(A64_R(0), (const u64)im, ctx);
2096 emit_call((const u64)__bpf_tramp_exit, ctx);
2100 restore_args(ctx, args_off, nregs);
2103 emit(A64_LDR64I(A64_R(19), A64_SP, regs_off), ctx);
2104 emit(A64_LDR64I(A64_R(20), A64_SP, regs_off + 8), ctx);
2107 emit(A64_LDR64I(A64_R(0), A64_SP, retval_off), ctx);
2110 emit(A64_MOV(1, A64_SP, A64_FP), ctx);
2113 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);
2114 emit(A64_POP(A64_FP, A64_R(9), A64_SP), ctx);
2118 emit(A64_MOV(1, A64_LR, A64_R(9)), ctx);
2119 emit(A64_RET(A64_R(9)), ctx);
2122 emit(A64_MOV(1, A64_R(10), A64_LR), ctx);
2123 emit(A64_MOV(1, A64_LR, A64_R(9)), ctx);
2124 emit(A64_RET(A64_R(10)), ctx);
2129 return ctx->idx;
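
prepare_trampoline() returns ctx->idx, the instruction count, so trampolines reuse the program JIT's two-pass trick: the sizing callback runs it with a NULL image (the first jit_ctx below, line 2150), and arch_prepare_bpf_trampoline() runs it again into the real buffer and validates the result (line 2217). Calling pattern, sketched from the matched lines:

struct jit_ctx ctx = { .image = NULL, .ro_image = NULL, .idx = 0 };

ret = prepare_trampoline(&ctx, im, tlinks, func_addr, nregs, flags);
if (ret < 0)
	return ret;		/* sizing failed */

ctx.image = image;		/* writable buffer of ret instructions */
ctx.ro_image = ro_image;
ctx.idx = 0;
ret = prepare_trampoline(&ctx, im, tlinks, func_addr, nregs, flags);
if (ret > 0 && validate_code(&ctx) < 0)
	ret = -EINVAL;
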
2150 struct jit_ctx ctx = {
2162 ret = prepare_trampoline(&ctx, &im, tlinks, func_addr, nregs, flags);
2203 struct jit_ctx ctx = {
2215 ret = prepare_trampoline(&ctx, im, tlinks, func_addr, nregs, flags);
2217 if (ret > 0 && validate_code(&ctx) < 0) {