Searched refs:ctx (Results 226 - 250 of 3532) sorted by relevance

/linux-master/tools/testing/selftests/lsm/
lsm_get_self_attr_test.c
30 struct lsm_ctx *ctx = calloc(page_size, 1); local
32 ASSERT_NE(NULL, ctx);
34 ASSERT_EQ(-1, lsm_get_self_attr(LSM_ATTR_CURRENT, ctx, NULL, 0));
37 free(ctx);
59 struct lsm_ctx *ctx = calloc(page_size, 1); local
62 ASSERT_NE(NULL, ctx);
64 ASSERT_EQ(-1, lsm_get_self_attr(LSM_ATTR_CURRENT, ctx, &size, 0));
72 free(ctx);
78 struct lsm_ctx *ctx = calloc(page_size, 1); local
84 ASSERT_NE(NULL, ctx);
119 struct lsm_ctx *ctx = calloc(page_size, 1); local
144 struct lsm_ctx *ctx = calloc(page_size, 1); local
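
The selftest lines above exercise argument validation of the lsm_get_self_attr() syscall (NULL size pointer, undersized buffer). A minimal userspace sketch of the normal call pattern, assuming a kernel and libc that expose __NR_lsm_get_self_attr and <linux/lsm.h>; error handling trimmed:

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <linux/lsm.h>          /* struct lsm_ctx, LSM_ATTR_CURRENT */

int main(void)
{
	__u32 size = sysconf(_SC_PAGESIZE);
	struct lsm_ctx *ctx = calloc(size, 1);   /* same pattern as the test */

	if (!ctx)
		return 1;
	/* Returns the number of attribute entries on success, -1 on error;
	 * size is updated to the space required if the buffer is too small.
	 * Assumes __NR_lsm_get_self_attr is provided by the toolchain headers. */
	long n = syscall(__NR_lsm_get_self_attr, LSM_ATTR_CURRENT, ctx, &size, 0);
	if (n > 0)
		printf("first LSM id %llu, ctx_len %llu\n",
		       (unsigned long long)ctx->id,
		       (unsigned long long)ctx->ctx_len);
	free(ctx);
	return n < 0;
}
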
/linux-master/arch/parisc/net/
bpf_jit_core.c
16 static int build_body(struct hppa_jit_context *ctx, bool extra_pass, int *offset) argument
18 const struct bpf_prog *prog = ctx->prog;
21 ctx->reg_seen_collect = true;
26 ret = bpf_jit_emit_insn(insn, ctx, extra_pass);
31 offset[i] = ctx->ninsns;
35 ctx->reg_seen_collect = false;
51 struct hppa_jit_context *ctx; local
74 ctx = &jit_data->ctx;
76 if (ctx
bpf_jit_comp32.c
120 static void emit_hppa_copy(const s8 rs, const s8 rd, struct hppa_jit_context *ctx) argument
122 REG_SET_SEEN(ctx, rd);
125 REG_SET_SEEN(ctx, rs);
126 emit(hppa_copy(rs, rd), ctx); local
129 static void emit_hppa_xor(const s8 r1, const s8 r2, const s8 r3, struct hppa_jit_context *ctx) argument
131 REG_SET_SEEN(ctx, r1);
132 REG_SET_SEEN(ctx, r2);
133 REG_SET_SEEN(ctx, r3);
135 emit(hppa_copy(HPPA_REG_ZERO, r3), ctx); local
137 emit(hppa_xor(r1, r2, r3), ctx); local
141 emit_imm(const s8 rd, s32 imm, struct hppa_jit_context *ctx) argument
147 emit(hppa_ldi(imm, rd), ctx); local
150 emit(hppa_ldil(imm, rd), ctx); local
156 emit_imm32(const s8 *rd, s32 imm, struct hppa_jit_context *ctx) argument
160 emit_imm(lo(rd), imm, ctx); local
170 emit_imm64(const s8 *rd, s32 imm_hi, s32 imm_lo, struct hppa_jit_context *ctx) argument
173 emit_imm(hi(rd), imm_hi, ctx); local
174 emit_imm(lo(rd), imm_lo, ctx); local
177 __build_epilogue(bool is_tail_call, struct hppa_jit_context *ctx) argument
190 emit(hppa_bv(HPPA_REG_ZERO, HPPA_REG_T0, EXEC_NEXT_INSTR), ctx); local
192 emit(hppa_copy(HPPA_REG_TCC, HPPA_REG_TCC_IN_INIT), ctx); local
199 emit(EXIT_PTR_LOAD(HPPA_REG_RP), ctx); local
200 emit(EXIT_PTR_JUMP(HPPA_REG_RP, NOP_NEXT_INSTR), ctx); local
203 emit_hppa_copy(lo(r0), HPPA_REG_RET0, ctx); local
214 emit(hppa_bv(HPPA_REG_ZERO, HPPA_REG_RP, EXEC_NEXT_INSTR), ctx); local
224 bpf_get_reg64_offset(const s8 *reg, const s8 *tmp, u16 offset_sp, struct hppa_jit_context *ctx) argument
237 bpf_get_reg64(const s8 *reg, const s8 *tmp, struct hppa_jit_context *ctx) argument
243 bpf_get_reg64_ref(const s8 *reg, const s8 *tmp, bool must_load, struct hppa_jit_context *ctx) argument
260 bpf_put_reg64(const s8 *reg, const s8 *src, struct hppa_jit_context *ctx) argument
264 emit(hppa_stw(hi(src), REG_SIZE * hi(reg), HPPA_REG_SP), ctx); local
265 emit(hppa_stw(lo(src), REG_SIZE * lo(reg), HPPA_REG_SP), ctx); local
269 bpf_save_R0(struct hppa_jit_context *ctx) argument
274 bpf_restore_R0(struct hppa_jit_context *ctx) argument
280 bpf_get_reg32(const s8 *reg, const s8 *tmp, struct hppa_jit_context *ctx) argument
291 bpf_get_reg32_ref(const s8 *reg, const s8 *tmp, struct hppa_jit_context *ctx) argument
304 bpf_put_reg32(const s8 *reg, const s8 *src, struct hppa_jit_context *ctx) argument
309 emit(hppa_stw(lo(src), REG_SIZE * lo(reg), HPPA_REG_SP), ctx); local
312 emit(hppa_stw(HPPA_REG_ZERO, REG_SIZE * hi(reg), HPPA_REG_SP), ctx); local
325 emit_call_millicode(void *func, const s8 arg0, const s8 arg1, u8 opcode, struct hppa_jit_context *ctx) argument
338 emit(hppa_ldil(func_addr, HPPA_REG_R31), ctx); local
349 emit(hppa_nop(), ctx); /* this nop is needed here for delay slot */ local
359 emit_call_libgcc_ll(void *func, const s8 *arg0, const s8 *arg1, u8 opcode, struct hppa_jit_context *ctx) argument
364 emit_hppa_copy(lo(arg0), HPPA_REG_ARG0, ctx); local
365 emit_hppa_copy(hi(arg0), HPPA_REG_ARG1, ctx); local
366 emit_hppa_copy(lo(arg1), HPPA_REG_ARG2, ctx); local
367 emit_hppa_copy(hi(arg1), HPPA_REG_ARG3, ctx); local
377 emit(hppa_ldil(func_addr, HPPA_REG_R31), ctx); local
401 emit_jump(s32 paoff, bool force_far, struct hppa_jit_context *ctx) argument
411 emit(hppa_nop(), ctx); local
417 emit(hppa_ldil(addr, HPPA_REG_R31), ctx); local
421 emit_alu_i64(const s8 *dst, s32 imm, struct hppa_jit_context *ctx, const u8 op) argument
447 emit_imm(hi(rd), -1, ctx); local
451 emit_hppa_xor(lo(rd), HPPA_REG_T0, lo(rd), ctx); local
454 emit_hppa_xor(hi(rd), HPPA_REG_T0, hi(rd), ctx); local
465 emit_hppa_copy(lo(rd), hi(rd), ctx); local
480 emit_hppa_copy(hi(rd), lo(rd), ctx); local
495 emit_hppa_copy(hi(rd), lo(rd), ctx); local
509 emit_alu_i32(const s8 *dst, s32 imm, struct hppa_jit_context *ctx, const u8 op) argument
523 emit_imm(lo(rd), imm, ctx); local
543 emit_hppa_xor(lo(rd), HPPA_REG_T0, lo(rd), ctx); local
564 emit_alu_r64(const s8 *dst, const s8 *src, struct hppa_jit_context *ctx, const u8 op) argument
580 emit_hppa_copy(lo(rs), lo(rd), ctx); local
581 emit_hppa_copy(hi(rs), hi(rd), ctx); local
600 emit_hppa_xor(lo(rd), lo(rs), lo(rd), ctx); local
601 emit_hppa_xor(hi(rd), hi(rs), hi(rd), ctx); local
622 emit(hppa_sub(HPPA_REG_ZERO, lo(rd), lo(rd)), ctx); local
623 emit(hppa_subb(HPPA_REG_ZERO, hi(rd), hi(rd)), ctx); local
632 emit_alu_r32(const s8 *dst, const s8 *src, struct hppa_jit_context *ctx, const u8 op) argument
648 emit_hppa_copy(lo(rs), lo(rd), ctx); local
663 emit_hppa_xor(lo(rd), lo(rs), lo(rd), ctx); local
676 emit(hppa_mtsar(HPPA_REG_T0), ctx); local
686 emit(hppa_mtsar(HPPA_REG_T0), ctx); local
690 emit(hppa_sub(HPPA_REG_ZERO, lo(rd), lo(rd)), ctx); // sub r0,rd,rd local
699 emit_branch_r64(const s8 *src1, const s8 *src2, s32 paoff, struct hppa_jit_context *ctx, const u8 op) argument
770 emit(hppa_and(hi(rs1), hi(rs2), HPPA_REG_T0), ctx); local
771 emit(hppa_and(lo(rs1), lo(rs2), HPPA_REG_T1), ctx); local
772 emit(hppa_bne(HPPA_REG_T0, HPPA_REG_ZERO, JUMP(1)), ctx); local
773 emit(hppa_beq(HPPA_REG_T1, HPPA_REG_ZERO, NO_JUMP(0)), ctx); local
789 emit_bcc(u8 op, u8 rd, u8 rs, int paoff, struct hppa_jit_context *ctx) argument
800 emit(hppa_and(rd, rs, HPPA_REG_T0), ctx); local
824 emit(hppa_beq(rd, rs, off), ctx); local
827 emit(hppa_bgtu(rd, rs, off), ctx); local
830 emit(hppa_bltu(rd, rs, off), ctx); local
833 emit(hppa_bgeu(rd, rs, off), ctx); local
836 emit(hppa_bleu(rd, rs, off), ctx); local
839 emit(hppa_bne(rd, rs, off), ctx); local
842 emit(hppa_bgt(rd, rs, off), ctx); local
845 emit(hppa_blt(rd, rs, off), ctx); local
848 emit(hppa_bge(rd, rs, off), ctx); local
851 emit(hppa_ble(rd, rs, off), ctx); local
866 emit_branch_r32(const s8 *src1, const s8 *src2, s32 paoff, struct hppa_jit_context *ctx, const u8 op) argument
886 emit_call(bool fixed, u64 addr, struct hppa_jit_context *ctx) argument
894 emit(hppa_ldo(offset_sp, HPPA_REG_SP, HPPA_REG_SP), ctx); local
910 emit_hppa_copy(hi(reg), HPPA_REG_ARG3, ctx); local
911 emit_hppa_copy(lo(reg), HPPA_REG_ARG2, ctx); local
914 emit_hppa_copy(hi(reg), HPPA_REG_ARG1, ctx); local
915 emit_hppa_copy(lo(reg), HPPA_REG_ARG0, ctx); local
919 emit(hppa_copy(HPPA_REG_TCC, HPPA_REG_TCC_SAVED), ctx); local
926 emit(hppa_ldil(addr, HPPA_REG_R31), ctx); local
933 emit(hppa_copy(HPPA_REG_TCC_SAVED, HPPA_REG_TCC), ctx); local
943 emit_bpf_tail_call(int insn, struct hppa_jit_context *ctx) argument
957 emit(EXIT_PTR_LOAD(HPPA_REG_RP), ctx); local
962 emit(hppa_ldw(off, lo(arr_reg), HPPA_REG_T1), ctx); local
969 emit(EXIT_PTR_JUMP(HPPA_REG_RP, NOP_NEXT_INSTR), ctx); local
978 emit(EXIT_PTR_JUMP(HPPA_REG_RP, NOP_NEXT_INSTR), ctx); local
986 emit(hppa_sh2add(lo(idx_reg), lo(arr_reg), HPPA_REG_T0), ctx); local
989 emit(hppa_ldw(off, HPPA_REG_T0, HPPA_REG_T0), ctx); local
991 emit(EXIT_PTR_JUMP(HPPA_REG_RP, NOP_NEXT_INSTR), ctx); local
1000 emit(hppa_ldw(off, HPPA_REG_T0, HPPA_REG_T0), ctx); local
1006 emit_load_r64(const s8 *dst, const s8 *src, s16 off, struct hppa_jit_context *ctx, const u8 size) argument
1021 emit(hppa_addil(off, lo(rs)), ctx); local
1052 emit_store_r64(const s8 *dst, const s8 *src, s16 off, struct hppa_jit_context *ctx, const u8 size, const u8 mode) argument
1068 emit(hppa_addil(off, lo(rd)), ctx); local
1092 emit_rev16(const s8 rd, struct hppa_jit_context *ctx) argument
1099 emit_rev32(const s8 rs, const s8 rd, struct hppa_jit_context *ctx) argument
1106 emit_zext64(const s8 *dst, struct hppa_jit_context *ctx) argument
1116 bpf_jit_emit_insn(const struct bpf_insn *insn, struct hppa_jit_context *ctx, bool extra_pass) argument
1275 emit_rev16(lo(rd), ctx); local
1280 emit_rev32(lo(rd), lo(rd), ctx); local
1286 emit_hppa_copy(hi(rd), HPPA_REG_T0, ctx); local
1287 emit_rev32(lo(rd), hi(rd), ctx); local
1396 emit(EXIT_PTR_LOAD(HPPA_REG_RP), ctx); local
1397 emit(EXIT_PTR_JUMP(HPPA_REG_RP, NOP_NEXT_INSTR), ctx); local
1467 bpf_jit_build_prologue(struct hppa_jit_context *ctx) argument
1500 emit(hppa_ldi(MAX_TAIL_CALL_CNT, HPPA_REG_TCC_IN_INIT), ctx); local
1505 emit(hppa_ldi(MAX_TAIL_CALL_CNT, HPPA_REG_R1), ctx); local
1510 emit(hppa_ldo(stack_adjust, HPPA_REG_SP, HPPA_REG_SP), ctx); // ldo stack_adjust(sp),sp (increase stack) local
1532 emit(hppa_ldi(MAX_TAIL_CALL_CNT, HPPA_REG_TCC), ctx); local
1541 emit(hppa_ldil(addr, HPPA_REG_T2), ctx); local
1543 emit(EXIT_PTR_STORE(HPPA_REG_T2), ctx); local
1609 emit(hppa_nop(), ctx); local
1612 bpf_jit_build_epilogue(struct hppa_jit_context *ctx) argument
/linux-master/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/
nv50.c
39 struct context *ctx = info; local
44 desc.dig_conn == ctx->desc.dig_conn)
53 struct context *ctx = info; local
56 mxms_output_device(mxm, data, &ctx->desc);
59 if ((ctx->outp[0] & 0x0000000f) != ctx->desc.outp_type)
68 u8 link = mxm_sor_map(bios, ctx->desc.dig_conn);
69 if ((ctx->outp[0] & 0x0f000000) != (link & 0x0f) << 24)
74 if ((link & ((ctx->outp[1] & 0x00000030) >> 4)) != link)
84 if (ctx
98 struct context ctx = { .outp = (u32 *)(bios->data + pdcb) }; local
/linux-master/io_uring/
rsrc.h
25 typedef void (rsrc_put_fn)(struct io_ring_ctx *ctx, struct io_rsrc_put *prsrc);
28 struct io_ring_ctx *ctx; member in struct:io_rsrc_data
37 struct io_ring_ctx *ctx; member in struct:io_rsrc_node
54 void io_rsrc_node_destroy(struct io_ring_ctx *ctx, struct io_rsrc_node *ref_node);
55 struct io_rsrc_node *io_rsrc_node_alloc(struct io_ring_ctx *ctx);
62 void __io_sqe_buffers_unregister(struct io_ring_ctx *ctx);
63 int io_sqe_buffers_unregister(struct io_ring_ctx *ctx);
64 int io_sqe_buffers_register(struct io_ring_ctx *ctx, void __user *arg,
66 void __io_sqe_files_unregister(struct io_ring_ctx *ctx);
67 int io_sqe_files_unregister(struct io_ring_ctx *ctx);
78 io_put_rsrc_node(struct io_ring_ctx *ctx, struct io_rsrc_node *node) argument
86 io_charge_rsrc_node(struct io_ring_ctx *ctx, struct io_rsrc_node *node) argument
92 __io_req_set_rsrc_node(struct io_kiocb *req, struct io_ring_ctx *ctx) argument
100 io_req_set_rsrc_node(struct io_kiocb *req, struct io_ring_ctx *ctx, unsigned int issue_flags) argument
119 io_rsrc_init(struct io_ring_ctx *ctx) argument
io_uring.c
144 static bool io_uring_try_cancel_requests(struct io_ring_ctx *ctx,
177 static inline unsigned int __io_cqring_events(struct io_ring_ctx *ctx) argument
179 return ctx->cached_cq_tail - READ_ONCE(ctx->rings->cq.head);
182 static inline unsigned int __io_cqring_events_user(struct io_ring_ctx *ctx) argument
184 return READ_ONCE(ctx->rings->cq.tail) - READ_ONCE(ctx->rings->cq.head);
213 struct io_ring_ctx *ctx = head->ctx; local
216 spin_lock_irq(&ctx
231 io_req_add_to_cache(struct io_kiocb *req, struct io_ring_ctx *ctx) argument
238 struct io_ring_ctx *ctx = container_of(ref, struct io_ring_ctx, refs); local
245 struct io_ring_ctx *ctx = container_of(work, struct io_ring_ctx, local
276 struct io_ring_ctx *ctx; local
361 io_account_cq_overflow(struct io_ring_ctx *ctx) argument
372 struct io_ring_ctx *ctx = req->ctx; local
456 struct io_ring_ctx *ctx = req->ctx; local
491 struct io_ring_ctx *ctx = req->ctx; local
530 io_queue_deferred(struct io_ring_ctx *ctx) argument
562 io_eventfd_signal(struct io_ring_ctx *ctx) argument
599 io_eventfd_flush_signal(struct io_ring_ctx *ctx) argument
622 __io_commit_cqring_flush(struct io_ring_ctx *ctx) argument
637 __io_cq_lock(struct io_ring_ctx *ctx) argument
649 __io_cq_unlock_post(struct io_ring_ctx *ctx) argument
665 io_commit_cqring(ctx); variable
667 io_cqring_wake(ctx); variable
668 io_commit_cqring_flush(ctx); variable
671 __io_cqring_overflow_flush(struct io_ring_ctx *ctx, bool dying) argument
708 io_cqring_overflow_kill(struct io_ring_ctx *ctx) argument
714 io_cqring_do_overflow_flush(struct io_ring_ctx *ctx) argument
768 io_cqring_event_overflow(struct io_ring_ctx *ctx, u64 user_data, s32 res, u32 cflags, u64 extra1, u64 extra2) argument
821 io_cqe_cache_refill(struct io_ring_ctx *ctx, bool overflow) argument
853 io_fill_cqe_aux(struct io_ring_ctx *ctx, u64 user_data, s32 res, u32 cflags) argument
881 io_post_aux_cqe(struct io_ring_ctx *ctx, u64 user_data, s32 res, u32 cflags) argument
900 struct io_ring_ctx *ctx = req->ctx; local
915 struct io_ring_ctx *ctx = req->ctx; local
966 io_preinit_req(struct io_kiocb *req, struct io_ring_ctx *ctx) argument
1024 struct io_ring_ctx *ctx = req->ctx; local
1048 ctx_flush_and_put(struct io_ring_ctx *ctx, struct io_tw_state *ts) argument
1069 struct io_ring_ctx *ctx = NULL; local
1180 struct io_ring_ctx *ctx = req->ctx; local
1249 struct io_ring_ctx *ctx = req->ctx; local
1284 io_move_task_work_from_local(struct io_ring_ctx *ctx) argument
1298 io_run_local_work_continue(struct io_ring_ctx *ctx, int events, int min_events) argument
1310 __io_run_local_work(struct io_ring_ctx *ctx, struct io_tw_state *ts, int min_events) argument
1349 io_run_local_work_locked(struct io_ring_ctx *ctx, int min_events) argument
1359 io_run_local_work(struct io_ring_ctx *ctx, int min_events) argument
1452 __io_cq_lock(ctx); variable
1468 __io_cq_unlock_post(ctx); variable
1477 io_cqring_events(struct io_ring_ctx *ctx) argument
1488 io_iopoll_try_reap_events(struct io_ring_ctx *ctx) argument
1512 io_iopoll_check(struct io_ring_ctx *ctx, long min) argument
1599 struct io_ring_ctx *ctx = req->ctx; local
1684 struct io_ring_ctx *ctx = req->ctx; variable in typeref:struct:io_ring_ctx
1900 struct io_ring_ctx *ctx = req->ctx; local
1999 io_check_restriction(struct io_ring_ctx *ctx, struct io_kiocb *req, unsigned int sqe_flags) argument
2019 struct io_ring_ctx *ctx = req->ctx; local
2141 struct io_ring_ctx *ctx = req->ctx; local
2227 io_submit_state_end(struct io_ring_ctx *ctx) argument
2252 io_commit_sqring(struct io_ring_ctx *ctx) argument
2272 io_get_sqe(struct io_ring_ctx *ctx, const struct io_uring_sqe **sqe) argument
2350 io_submit_state_end(ctx); variable
2352 io_commit_sqring(ctx); variable
2370 io_run_task_work_sig(struct io_ring_ctx *ctx) argument
2394 io_cqring_wait_schedule(struct io_ring_ctx *ctx, struct io_wait_queue *iowq) argument
2430 io_cqring_wait(struct io_ring_ctx *ctx, int min_events, const sigset_t __user *sig, size_t sigsz, struct __kernel_timespec __user *uts) argument
2545 io_rings_map(struct io_ring_ctx *ctx, unsigned long uaddr, size_t size) argument
2552 io_sqes_map(struct io_ring_ctx *ctx, unsigned long uaddr, size_t size) argument
2559 io_rings_free(struct io_ring_ctx *ctx) argument
2579 rings_size(struct io_ring_ctx *ctx, unsigned int sq_entries, unsigned int cq_entries, size_t *sq_offset) argument
2616 io_req_caches_free(struct io_ring_ctx *ctx) argument
2633 io_ring_ctx_free(struct io_ring_ctx *ctx) argument
2687 struct io_ring_ctx *ctx = container_of(cb, struct io_ring_ctx, local
2702 io_activate_pollwq(struct io_ring_ctx *ctx) argument
2726 struct io_ring_ctx *ctx = file->private_data; local
2764 struct io_ring_ctx *ctx; member in struct:io_tctx_exit
2793 struct io_ring_ctx *ctx = container_of(work, struct io_ring_ctx, exit_work); local
2886 io_ring_ctx_wait_and_kill(struct io_ring_ctx *ctx) argument
2911 struct io_ring_ctx *ctx = file->private_data; local
2931 io_cancel_defer_files(struct io_ring_ctx *ctx, struct task_struct *task, bool cancel_all) argument
2958 io_uring_try_cancel_iowq(struct io_ring_ctx *ctx) argument
2982 io_uring_try_cancel_requests(struct io_ring_ctx *ctx, struct task_struct *task, bool cancel_all) argument
3055 struct io_ring_ctx *ctx; local
3185 struct io_ring_ctx *ctx; local
3331 io_allocate_scq_urings(struct io_ring_ctx *ctx, struct io_uring_params *p) argument
3401 io_uring_get_file(struct io_ring_ctx *ctx) argument
3411 struct io_ring_ctx *ctx; local
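
The __io_cqring_events()/__io_cqring_events_user() matches above compute completion-queue occupancy as tail - head in unsigned arithmetic, which stays correct across index wraparound. A small standalone illustration of that property (plain userspace code, not io_uring itself; ring_events is an illustrative name):

#include <assert.h>

/* With free-running 32-bit indices, tail - head yields the element count
 * even after the counters wrap past UINT_MAX. */
static unsigned int ring_events(unsigned int head, unsigned int tail)
{
	return tail - head;
}

int main(void)
{
	assert(ring_events(10, 14) == 4);
	/* head just below the wrap point, tail already wrapped */
	assert(ring_events(0xfffffffeu, 0x00000002u) == 4);
	return 0;
}
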
rsrc.c
28 static void io_rsrc_buf_put(struct io_ring_ctx *ctx, struct io_rsrc_put *prsrc);
29 static int io_sqe_buffer_register(struct io_ring_ctx *ctx, struct iovec *iov,
63 static void io_unaccount_mem(struct io_ring_ctx *ctx, unsigned long nr_pages) argument
65 if (ctx->user)
66 __io_unaccount_mem(ctx->user, nr_pages);
68 if (ctx->mm_account)
69 atomic64_sub(nr_pages, &ctx->mm_account->pinned_vm);
72 static int io_account_mem(struct io_ring_ctx *ctx, unsigned long nr_pages) argument
76 if (ctx->user) {
77 ret = __io_account_mem(ctx
88 io_copy_iov(struct io_ring_ctx *ctx, struct iovec *dst, void __user *arg, unsigned index) argument
137 io_buffer_unmap(struct io_ring_ctx *ctx, struct io_mapped_ubuf **slot) argument
172 io_rsrc_node_destroy(struct io_ring_ctx *ctx, struct io_rsrc_node *node) argument
181 struct io_ring_ctx *ctx = node->ctx; variable in typeref:struct:io_ring_ctx
199 io_rsrc_node_alloc(struct io_ring_ctx *ctx) argument
216 io_rsrc_ref_quiesce(struct io_rsrc_data *data, struct io_ring_ctx *ctx) argument
316 io_rsrc_data_alloc(struct io_ring_ctx *ctx, int type, u64 __user *utags, unsigned nr, struct io_rsrc_data **pdata) argument
353 __io_sqe_files_update(struct io_ring_ctx *ctx, struct io_uring_rsrc_update2 *up, unsigned nr_args) argument
418 __io_sqe_buffers_update(struct io_ring_ctx *ctx, struct io_uring_rsrc_update2 *up, unsigned int nr_args) argument
472 __io_register_rsrc_update(struct io_ring_ctx *ctx, unsigned type, struct io_uring_rsrc_update2 *up, unsigned nr_args) argument
492 io_register_files_update(struct io_ring_ctx *ctx, void __user *arg, unsigned nr_args) argument
507 io_register_rsrc_update(struct io_ring_ctx *ctx, void __user *arg, unsigned size, unsigned type) argument
521 io_register_rsrc(struct io_ring_ctx *ctx, void __user *arg, unsigned int size, unsigned int type) argument
612 struct io_ring_ctx *ctx = req->ctx; local
640 struct io_ring_ctx *ctx = data->ctx; local
659 __io_sqe_files_unregister(struct io_ring_ctx *ctx) argument
679 io_sqe_files_unregister(struct io_ring_ctx *ctx) argument
699 io_sqe_files_register(struct io_ring_ctx *ctx, void __user *arg, unsigned nr_args, u64 __user *tags) argument
766 io_rsrc_buf_put(struct io_ring_ctx *ctx, struct io_rsrc_put *prsrc) argument
772 __io_sqe_buffers_unregister(struct io_ring_ctx *ctx) argument
785 io_sqe_buffers_unregister(struct io_ring_ctx *ctx) argument
814 headpage_already_acct(struct io_ring_ctx *ctx, struct page **pages, int nr_pages, struct page *hpage) argument
842 io_buffer_account_pin(struct io_ring_ctx *ctx, struct page **pages, int nr_pages, struct io_mapped_ubuf *imu, struct page **last_hpage) argument
874 io_sqe_buffer_register(struct io_ring_ctx *ctx, struct iovec *iov, struct io_mapped_ubuf **pimu, struct page **last_hpage) argument
962 io_buffers_map_alloc(struct io_ring_ctx *ctx, unsigned int nr_args) argument
968 io_sqe_buffers_register(struct io_ring_ctx *ctx, void __user *arg, unsigned int nr_args, u64 __user *tags) argument
/linux-master/arch/sparc/net/
bpf_jit_comp_64.c
236 static void emit(const u32 insn, struct jit_ctx *ctx) argument
238 if (ctx->image != NULL)
239 ctx->image[ctx->idx] = insn;
241 ctx->idx++;
244 static void emit_call(u32 *func, struct jit_ctx *ctx) argument
246 if (ctx->image != NULL) {
247 void *here = &ctx->image[ctx->idx];
251 ctx
256 emit_nop(struct jit_ctx *ctx) argument
261 emit_reg_move(u32 from, u32 to, struct jit_ctx *ctx) argument
267 emit_set_const(s32 K, u32 reg, struct jit_ctx *ctx) argument
269 emit(SETHI(K, reg), ctx); local
270 emit(OR_LO(K, reg), ctx); local
274 emit_set_const_sext(s32 K, u32 reg, struct jit_ctx *ctx) argument
277 emit(SETHI(K, reg), ctx); local
278 emit(OR_LO(K, reg), ctx); local
283 emit(SETHI(hbits, reg), ctx); local
288 emit_alu(u32 opcode, u32 src, u32 dst, struct jit_ctx *ctx) argument
293 emit_alu3(u32 opcode, u32 a, u32 b, u32 c, struct jit_ctx *ctx) argument
298 emit_alu_K(unsigned int opcode, unsigned int dst, unsigned int imm, struct jit_ctx *ctx) argument
317 emit_alu3_K(unsigned int opcode, unsigned int src, unsigned int imm, unsigned int dst, struct jit_ctx *ctx) argument
336 emit_loadimm32(s32 K, unsigned int dest, struct jit_ctx *ctx) argument
346 emit_loadimm(s32 K, unsigned int dest, struct jit_ctx *ctx) argument
356 emit_loadimm_sext(s32 K, unsigned int dest, struct jit_ctx *ctx) argument
448 sparc_emit_set_const64_quick2(unsigned long high_bits, unsigned long low_imm, unsigned int dest, int shift_count, struct jit_ctx *ctx) argument
465 emit_loadimm64(u64 K, unsigned int dest, struct jit_ctx *ctx) argument
524 emit(SETHI(focus_bits, dest), ctx); local
561 emit(SETHI(fast_int, dest), ctx); local
616 emit_branch(unsigned int br_opc, unsigned int from_idx, unsigned int to_idx, struct jit_ctx *ctx) argument
627 emit_cbcond(unsigned int cb_opc, unsigned int from_idx, unsigned int to_idx, const u8 dst, const u8 src, struct jit_ctx *ctx) argument
635 emit_cbcondi(unsigned int cb_opc, unsigned int from_idx, unsigned int to_idx, const u8 dst, s32 imm, struct jit_ctx *ctx) argument
658 emit_compare_and_branch(const u8 code, const u8 dst, u8 src, const s32 imm, bool is_imm, int branch_dst, struct jit_ctx *ctx) argument
797 build_prologue(struct jit_ctx *ctx) argument
839 build_epilogue(struct jit_ctx *ctx) argument
850 emit_tail_call(struct jit_ctx *ctx) argument
895 build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx) argument
1438 build_body(struct jit_ctx *ctx) argument
1477 struct jit_ctx ctx; member in struct:sparc64_jit_data
1488 struct jit_ctx ctx; local
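
The emit() helpers shown for this sparc64 JIT (and the arm64 one further down) follow the common two-pass pattern: the first pass runs with ctx->image == NULL and only advances ctx->idx to size the code buffer, the second pass writes the instructions for real. A stripped-down sketch of that idiom; jit_ctx_sketch, emit_sketch and jit_two_pass are made-up names, not the kernel's types:

#include <stdio.h>
#include <stdlib.h>

struct jit_ctx_sketch {
	unsigned int idx;	/* next instruction slot */
	unsigned int *image;	/* NULL during the sizing pass */
};

static void emit_sketch(unsigned int insn, struct jit_ctx_sketch *ctx)
{
	if (ctx->image != NULL)
		ctx->image[ctx->idx] = insn;	/* second pass: store the opcode */
	ctx->idx++;				/* both passes: count instructions */
}

static unsigned int *jit_two_pass(const unsigned int *insns, unsigned int n)
{
	struct jit_ctx_sketch ctx = { 0, NULL };
	unsigned int i;

	for (i = 0; i < n; i++)			/* pass 1: measure */
		emit_sketch(insns[i], &ctx);
	ctx.image = malloc(ctx.idx * sizeof(*ctx.image));
	if (!ctx.image)
		return NULL;
	ctx.idx = 0;
	for (i = 0; i < n; i++)			/* pass 2: emit for real */
		emit_sketch(insns[i], &ctx);
	return ctx.image;
}

int main(void)
{
	const unsigned int prog[] = { 0xd503201f /* a64 nop */, 0xd65f03c0 /* a64 ret */ };
	unsigned int *image = jit_two_pass(prog, 2);

	if (image) {
		printf("emitted %#x %#x\n", image[0], image[1]);
		free(image);
	}
	return 0;
}
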
/linux-master/drivers/gpu/drm/panel/
panel-raydium-rm69380.c
36 static void rm69380_reset(struct rm69380_panel *ctx) argument
38 gpiod_set_value_cansleep(ctx->reset_gpio, 0);
40 gpiod_set_value_cansleep(ctx->reset_gpio, 1);
42 gpiod_set_value_cansleep(ctx->reset_gpio, 0);
46 static int rm69380_on(struct rm69380_panel *ctx) argument
48 struct mipi_dsi_device *dsi = ctx->dsi[0];
53 if (ctx->dsi[1])
54 ctx->dsi[1]->mode_flags |= MIPI_DSI_MODE_LPM;
90 static int rm69380_off(struct rm69380_panel *ctx) argument
92 struct mipi_dsi_device *dsi = ctx
119 struct rm69380_panel *ctx = to_rm69380_panel(panel); local
144 struct rm69380_panel *ctx = to_rm69380_panel(panel); local
241 struct rm69380_panel *ctx; local
321 struct rm69380_panel *ctx = mipi_dsi_get_drvdata(dsi); local
panel-lg-lg4573.c
40 static int lg4573_spi_write_u16(struct lg4573 *ctx, u16 data) argument
48 dev_dbg(ctx->panel.dev, "writing data: %x\n", data);
53 return spi_sync(ctx->spi, &msg);
56 static int lg4573_spi_write_u16_array(struct lg4573 *ctx, const u16 *buffer, argument
63 ret = lg4573_spi_write_u16(ctx, buffer[i]);
71 static int lg4573_spi_write_dcs(struct lg4573 *ctx, u8 dcs) argument
73 return lg4573_spi_write_u16(ctx, (0x70 << 8 | dcs));
76 static int lg4573_display_on(struct lg4573 *ctx) argument
80 ret = lg4573_spi_write_dcs(ctx, MIPI_DCS_EXIT_SLEEP_MODE);
86 return lg4573_spi_write_dcs(ctx, MIPI_DCS_SET_DISPLAY_O
89 lg4573_display_off(struct lg4573 *ctx) argument
102 lg4573_display_mode_settings(struct lg4573 *ctx) argument
120 lg4573_power_settings(struct lg4573 *ctx) argument
136 lg4573_gamma_settings(struct lg4573 *ctx) argument
161 lg4573_init(struct lg4573 *ctx) argument
178 lg4573_power_on(struct lg4573 *ctx) argument
185 struct lg4573 *ctx = panel_to_lg4573(panel); local
192 struct lg4573 *ctx = panel_to_lg4573(panel); local
243 struct lg4573 *ctx; local
271 struct lg4573 *ctx = spi_get_drvdata(spi); local
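
The truncated lg4573_spi_write_u16() match above sends one 16-bit word per transfer via spi_sync(). A sketch of how such a helper is typically assembled with the standard spi_message API; spi_write_word is an illustrative name and the MSB-first framing is an assumption, the driver's exact framing is in the file itself:

#include <linux/spi/spi.h>

/* Sketch in the style of lg4573_spi_write_u16(): one 16-bit word per
 * synchronous transfer; MSB-first byte order assumed for illustration. */
static int spi_write_word(struct spi_device *spi, u16 data)
{
	u8 buf[2] = { data >> 8, data & 0xff };
	struct spi_transfer xfer = {
		.tx_buf = buf,
		.len = sizeof(buf),
	};
	struct spi_message msg;

	spi_message_init(&msg);
	spi_message_add_tail(&xfer, &msg);
	return spi_sync(spi, &msg);	/* blocking synchronous transfer */
}
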
/linux-master/drivers/soundwire/
intel_init.c
33 struct sdw_intel_ctx *ctx,
59 * no need to check if ctx->ldev[i] is NULL later on.
61 ctx->ldev[link_id] = ldev;
73 link->shim_lock = &ctx->shim_lock;
86 link->shim_mask = &ctx->shim_mask;
87 link->link_mask = ctx->link_mask;
118 static int sdw_intel_cleanup(struct sdw_intel_ctx *ctx) argument
124 link_mask = ctx->link_mask;
126 for (i = 0; i < ctx->count; i++) {
130 ldev = ctx
32 intel_link_dev_register(struct sdw_intel_res *res, struct sdw_intel_ctx *ctx, struct fwnode_handle *fwnode, const char *name, int link_id) argument
144 struct sdw_intel_ctx *ctx = dev_id; local
159 struct sdw_intel_ctx *ctx; local
285 sdw_intel_startup_controller(struct sdw_intel_ctx *ctx) argument
347 sdw_intel_startup(struct sdw_intel_ctx *ctx) argument
358 sdw_intel_exit(struct sdw_intel_ctx *ctx) argument
380 sdw_intel_process_wakeen_event(struct sdw_intel_ctx *ctx) argument
/linux-master/drivers/misc/cxl/
irq.c
26 static irqreturn_t schedule_cxl_fault(struct cxl_context *ctx, u64 dsisr, u64 dar) argument
28 ctx->dsisr = dsisr;
29 ctx->dar = dar;
30 schedule_work(&ctx->fault_work);
34 irqreturn_t cxl_irq_psl9(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info) argument
41 trace_cxl_psl9_irq(ctx, irq, dsisr, dar);
43 pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar);
46 pr_devel("CXL interrupt: Scheduling translation fault handling for later (pe: %i)\n", ctx->pe);
47 return schedule_cxl_fault(ctx, dsisr, dar);
51 return cxl_ops->handle_psl_slice_error(ctx, dsis
85 cxl_irq_psl8(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info) argument
173 struct cxl_context *ctx = data; local
281 afu_irq_name_free(struct cxl_context *ctx) argument
292 afu_allocate_irqs(struct cxl_context *ctx, u32 count) argument
357 afu_register_hwirqs(struct cxl_context *ctx) argument
390 afu_register_irqs(struct cxl_context *ctx, u32 count) argument
402 afu_release_irqs(struct cxl_context *ctx, void *cookie) argument
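
schedule_cxl_fault() above stashes the fault registers in the context and defers the heavy lifting to a workqueue, the usual pattern for work that cannot run in hard-IRQ context. A generic sketch of that handler/worker split; fault_ctx, schedule_fault and fault_worker are illustrative names, and fault_work is assumed to have been set up with INIT_WORK() at init time:

#include <linux/interrupt.h>
#include <linux/workqueue.h>
#include <linux/printk.h>

struct fault_ctx {
	u64 dsisr;
	u64 dar;
	struct work_struct fault_work;	/* INIT_WORK(&ctx->fault_work, fault_worker) at init */
};

/* Called from the hard-IRQ path with the raw fault registers; everything
 * slow (table walks, logging, wakeups) happens later in fault_worker(). */
static irqreturn_t schedule_fault(struct fault_ctx *ctx, u64 dsisr, u64 dar)
{
	ctx->dsisr = dsisr;
	ctx->dar = dar;
	schedule_work(&ctx->fault_work);
	return IRQ_HANDLED;
}

static void fault_worker(struct work_struct *work)
{
	struct fault_ctx *ctx = container_of(work, struct fault_ctx, fault_work);

	pr_info("handling fault: dsisr=%#llx dar=%#llx\n", ctx->dsisr, ctx->dar);
}
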
/linux-master/arch/riscv/net/
bpf_jit_core.c
19 static int build_body(struct rv_jit_context *ctx, bool extra_pass, int *offset) argument
21 const struct bpf_prog *prog = ctx->prog;
28 ret = bpf_jit_emit_insn(insn, ctx, extra_pass);
33 offset[i] = ctx->ninsns;
52 struct rv_jit_context *ctx; local
75 ctx = &jit_data->ctx;
77 if (ctx->offset) {
79 prog_size = sizeof(*ctx->insns) * ctx
/linux-master/tools/testing/selftests/bpf/benchs/
bench_bloom_filter_map.c
11 static struct ctx { struct
29 } ctx = { variable in typeref:struct:ctx
139 ctx.map_prepare_err = true;
144 i = __atomic_add_fetch(&ctx.next_map_idx, 1, __ATOMIC_RELAXED);
154 ctx.map_prepare_err = true;
159 if (ctx.use_hashmap) {
160 err = bpf_map_update_elem(ctx.hashmap_fd, val, val, BPF_NOEXIST);
163 ctx.map_prepare_err = true;
174 if (ctx.use_array_map) {
175 err = bpf_map_update_elem(ctx
/linux-master/crypto/
ecrdsa.c
74 struct ecrdsa_ctx *ctx = akcipher_tfm_ctx(tfm); local
92 if (!ctx->curve ||
93 !ctx->digest ||
95 !ctx->pub_key.x ||
96 req->dst_len != ctx->digest_len ||
97 req->dst_len != ctx->curve->g.ndigits * sizeof(u64) ||
98 ctx->pub_key.ndigits != ctx->curve->g.ndigits ||
116 vli_cmp(r, ctx->curve->n, ndigits) >= 0 ||
118 vli_cmp(s, ctx
153 struct ecrdsa_ctx *ctx = context; local
166 struct ecrdsa_ctx *ctx = context; local
177 struct ecrdsa_ctx *ctx = context; local
194 struct ecrdsa_ctx *ctx = akcipher_tfm_ctx(tfm); local
254 struct ecrdsa_ctx *ctx = akcipher_tfm_ctx(tfm); local
/linux-master/drivers/gpu/drm/exynos/
exynos_mixer.c
184 static inline u32 vp_reg_read(struct mixer_context *ctx, u32 reg_id) argument
186 return readl(ctx->vp_regs + reg_id);
189 static inline void vp_reg_write(struct mixer_context *ctx, u32 reg_id, argument
192 writel(val, ctx->vp_regs + reg_id);
195 static inline void vp_reg_writemask(struct mixer_context *ctx, u32 reg_id, argument
198 u32 old = vp_reg_read(ctx, reg_id);
201 writel(val, ctx->vp_regs + reg_id);
204 static inline u32 mixer_reg_read(struct mixer_context *ctx, u32 reg_id) argument
206 return readl(ctx->mixer_regs + reg_id);
209 static inline void mixer_reg_write(struct mixer_context *ctx, u3 argument
215 mixer_reg_writemask(struct mixer_context *ctx, u32 reg_id, u32 val, u32 mask) argument
224 mixer_regs_dump(struct mixer_context *ctx) argument
256 vp_regs_dump(struct mixer_context *ctx) argument
291 vp_filter_set(struct mixer_context *ctx, int reg_id, const u8 *data, unsigned int size) argument
303 vp_default_filter(struct mixer_context *ctx) argument
313 mixer_cfg_gfx_blend(struct mixer_context *ctx, unsigned int win, unsigned int pixel_alpha, unsigned int alpha) argument
341 mixer_cfg_vp_blend(struct mixer_context *ctx, unsigned int alpha) argument
353 mixer_is_synced(struct mixer_context *ctx) argument
384 mixer_wait_for_sync(struct mixer_context *ctx) argument
396 mixer_disable_sync(struct mixer_context *ctx) argument
401 mixer_enable_sync(struct mixer_context *ctx) argument
411 mixer_cfg_scan(struct mixer_context *ctx, int width, int height) argument
428 mixer_cfg_rgb_fmt(struct mixer_context *ctx, struct drm_display_mode *mode) argument
456 mixer_cfg_layer(struct mixer_context *ctx, unsigned int win, unsigned int priority, bool enable) argument
488 mixer_run(struct mixer_context *ctx) argument
493 mixer_stop(struct mixer_context *ctx) argument
504 mixer_commit(struct mixer_context *ctx) argument
513 vp_video_buffer(struct mixer_context *ctx, struct exynos_drm_plane *plane) argument
600 mixer_graph_buffer(struct mixer_context *ctx, struct exynos_drm_plane *plane) argument
687 vp_win_reset(struct mixer_context *ctx) argument
701 mixer_win_reset(struct mixer_context *ctx) argument
745 struct mixer_context *ctx = arg; local
939 struct mixer_context *ctx = crtc->ctx; local
994 struct mixer_context *ctx = crtc->ctx; local
1028 struct mixer_context *ctx = crtc->ctx; local
1050 struct mixer_context *ctx = crtc->ctx; local
1077 struct mixer_context *ctx = crtc->ctx; local
1177 struct mixer_context *ctx = dev_get_drvdata(dev); local
1216 struct mixer_context *ctx = dev_get_drvdata(dev); local
1230 struct mixer_context *ctx; local
1270 struct mixer_context *ctx = dev_get_drvdata(dev); local
1285 struct mixer_context *ctx = dev_get_drvdata(dev); local
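
vp_reg_writemask()/mixer_reg_writemask() in the block above implement the usual read-modify-write pattern for updating only the masked bits of an MMIO register. A generic sketch of that helper; reg_writemask and its parameters are illustrative names, not the driver's:

#include <linux/io.h>

/* Update only the bits selected by mask, preserving the rest:
 * new = (val & mask) | (old & ~mask), as vp_reg_writemask() does. */
static inline void reg_writemask(void __iomem *base, u32 reg, u32 val, u32 mask)
{
	u32 old = readl(base + reg);

	val = (val & mask) | (old & ~mask);
	writel(val, base + reg);
}
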
/linux-master/drivers/media/platform/verisilicon/
hantro_h1_jpeg_enc.c
19 struct hantro_ctx *ctx)
30 overfill_r = ctx->src_fmt.width - ctx->dst_fmt.width;
31 overfill_b = ctx->src_fmt.height - ctx->dst_fmt.height;
33 reg = H1_REG_IN_IMG_CTRL_ROW_LEN(ctx->src_fmt.width)
36 | H1_REG_IN_IMG_CTRL_FMT(ctx->vpu_src_fmt->enc_fmt);
41 struct hantro_ctx *ctx,
45 struct v4l2_pix_format_mplane *pix_fmt = &ctx->src_fmt;
49 size_left = vb2_plane_size(dst_buf, 0) - ctx
18 hantro_h1_set_src_img_ctrl(struct hantro_dev *vpu, struct hantro_ctx *ctx) argument
40 hantro_h1_jpeg_enc_set_buffers(struct hantro_dev *vpu, struct hantro_ctx *ctx, struct vb2_buffer *src_buf, struct vb2_buffer *dst_buf) argument
106 hantro_h1_jpeg_enc_run(struct hantro_ctx *ctx) argument
158 hantro_h1_jpeg_enc_done(struct hantro_ctx *ctx) argument
rockchip_vpu2_hw_jpeg_enc.c
36 struct hantro_ctx *ctx)
47 overfill_r = ctx->src_fmt.width - ctx->dst_fmt.width;
48 overfill_b = ctx->src_fmt.height - ctx->dst_fmt.height;
50 reg = VEPU_REG_IN_IMG_CTRL_ROW_LEN(ctx->src_fmt.width);
63 reg = VEPU_REG_IN_IMG_CTRL_FMT(ctx->vpu_src_fmt->enc_fmt);
68 struct hantro_ctx *ctx,
72 struct v4l2_pix_format_mplane *pix_fmt = &ctx->src_fmt;
76 size_left = vb2_plane_size(dst_buf, 0) - ctx
35 rockchip_vpu2_set_src_img_ctrl(struct hantro_dev *vpu, struct hantro_ctx *ctx) argument
67 rockchip_vpu2_jpeg_enc_set_buffers(struct hantro_dev *vpu, struct hantro_ctx *ctx, struct vb2_buffer *src_buf, struct vb2_buffer *dst_buf) argument
132 rockchip_vpu2_jpeg_enc_run(struct hantro_ctx *ctx) argument
189 rockchip_vpu2_jpeg_enc_done(struct hantro_ctx *ctx) argument
/linux-master/drivers/fpga/tests/
fpga-mgr-test.c
194 struct mgr_ctx *ctx = test->priv; local
197 mgr = fpga_mgr_get(ctx->dev);
198 KUNIT_EXPECT_PTR_EQ(test, mgr, ctx->mgr);
200 fpga_mgr_put(ctx->mgr);
205 struct mgr_ctx *ctx = test->priv; local
208 ret = fpga_mgr_lock(ctx->mgr);
211 ret = fpga_mgr_lock(ctx->mgr);
214 fpga_mgr_unlock(ctx->mgr);
220 struct mgr_ctx *ctx = test->priv; local
226 ctx
248 struct mgr_ctx *ctx = test->priv; local
282 struct mgr_ctx *ctx; local
304 struct mgr_ctx *ctx = test->priv; local
/linux-master/arch/arm64/net/
bpf_jit_comp.c
97 static inline void emit(const u32 insn, struct jit_ctx *ctx) argument
99 if (ctx->image != NULL)
100 ctx->image[ctx->idx] = cpu_to_le32(insn);
102 ctx->idx++;
106 const s32 val, struct jit_ctx *ctx)
113 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx); local
115 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx); local
117 emit(A64_MOVK(is64, reg, lo, 0), ctx);
120 emit(A64_MOVZ(is64, reg, lo, 0), ctx);
105 emit_a64_mov_i(const int is64, const int reg, const s32 val, struct jit_ctx *ctx) argument
134 emit_a64_mov_i64(const int reg, const u64 val, struct jit_ctx *ctx) argument
159 emit_bti(u32 insn, struct jit_ctx *ctx) argument
170 emit_addr_mov_i64(const int reg, const u64 val, struct jit_ctx *ctx) argument
184 emit_call(u64 target, struct jit_ctx *ctx) argument
189 emit(A64_BLR(tmp), ctx); local
192 bpf2a64_offset(int bpf_insn, int off, const struct jit_ctx *ctx) argument
221 epilogue_offset(const struct jit_ctx *ctx) argument
301 build_prologue(struct jit_ctx *ctx, bool ebpf_from_cbpf, bool is_exception_cb, u64 arena_vm_start) argument
356 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); local
360 emit(A64_PUSH(r6, r7, A64_SP), ctx); local
361 emit(A64_PUSH(r8, r9, A64_SP), ctx); local
362 emit(A64_PUSH(fp, tcc, A64_SP), ctx); local
363 emit(A64_PUSH(fpb, A64_R(28), A64_SP), ctx); local
427 emit_bpf_tail_call(struct jit_ctx *ctx) argument
446 emit(A64_LDR32(tmp, r2, tmp), ctx); local
449 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); local
458 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); local
469 emit(A64_LDR64(prg, tmp, prg), ctx); local
475 emit(A64_LDR64(tmp, prg, tmp), ctx); local
478 emit(A64_BR(tmp), ctx); local
494 emit_lse_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx) argument
522 emit(A64_STADD(isdw, reg, src), ctx); local
525 emit(A64_MVN(isdw, tmp2, src), ctx); local
526 emit(A64_STCLR(isdw, reg, tmp2), ctx); local
529 emit(A64_STSET(isdw, reg, src), ctx); local
532 emit(A64_STEOR(isdw, reg, src), ctx); local
536 emit(A64_LDADDAL(isdw, src, reg, src), ctx); local
539 emit(A64_MVN(isdw, tmp2, src), ctx); local
540 emit(A64_LDCLRAL(isdw, src, reg, tmp2), ctx); local
543 emit(A64_LDSETAL(isdw, src, reg, src), ctx); local
546 emit(A64_LDEORAL(isdw, src, reg, src), ctx); local
550 emit(A64_SWPAL(isdw, src, reg, src), ctx); local
554 emit(A64_CASAL(isdw, src, reg, bpf2a64[BPF_REG_0]), ctx); local
564 emit_lse_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx) argument
570 emit_ll_sc_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx) argument
602 emit(A64_LDXR(isdw, tmp2, reg), ctx); local
604 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx); local
606 emit(A64_AND(isdw, tmp2, tmp2, src), ctx); local
608 emit(A64_ORR(isdw, tmp2, tmp2, src), ctx); local
610 emit(A64_EOR(isdw, tmp2, tmp2, src), ctx); local
611 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx); local
622 emit(A64_MOV(isdw, ax, src), ctx); local
623 emit(A64_LDXR(isdw, src, reg), ctx); local
625 emit(A64_ADD(isdw, tmp2, src, ax), ctx); local
627 emit(A64_AND(isdw, tmp2, src, ax), ctx); local
629 emit(A64_ORR(isdw, tmp2, src, ax), ctx); local
631 emit(A64_EOR(isdw, tmp2, src, ax), ctx); local
632 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx); local
639 emit(A64_MOV(isdw, tmp2, src), ctx); local
640 emit(A64_LDXR(isdw, src, reg), ctx); local
641 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx); local
650 emit(A64_MOV(isdw, tmp2, r0), ctx); local
651 emit(A64_LDXR(isdw, r0, reg), ctx); local
652 emit(A64_EOR(isdw, tmp3, r0, tmp2), ctx); local
655 emit(A64_CBNZ(isdw, tmp3, jmp_offset), ctx); local
656 emit(A64_STLXR(isdw, src, reg, tmp3), ctx); local
698 build_plt(struct jit_ctx *ctx) argument
710 emit(A64_BR(tmp), ctx); local
716 build_epilogue(struct jit_ctx *ctx, bool is_exception_cb) argument
739 emit(A64_POP(fpb, A64_R(28), A64_SP), ctx); local
741 emit(A64_POP(fp, A64_R(26), A64_SP), ctx); local
744 emit(A64_POP(r8, r9, A64_SP), ctx); local
745 emit(A64_POP(r6, r7, A64_SP), ctx); local
748 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); local
757 emit(A64_RET(A64_LR), ctx); local
777 add_exception_handler(const struct bpf_insn *insn, struct jit_ctx *ctx, int dst_reg) argument
856 build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, bool extra_pass) argument
897 emit(A64_MRS_TPIDR_EL2(tmp), ctx); local
899 emit(A64_MRS_TPIDR_EL1(tmp), ctx); local
905 emit(A64_MOV(is64, dst, src), ctx); local
908 emit(A64_SXTB(is64, dst, src), ctx); local
911 emit(A64_SXTH(is64, dst, src), ctx); local
914 emit(A64_SXTW(is64, dst, src), ctx); local
921 emit(A64_ADD(is64, dst, dst, src), ctx); local
925 emit(A64_SUB(is64, dst, dst, src), ctx); local
929 emit(A64_AND(is64, dst, dst, src), ctx); local
933 emit(A64_ORR(is64, dst, dst, src), ctx); local
937 emit(A64_EOR(is64, dst, dst, src), ctx); local
941 emit(A64_MUL(is64, dst, dst, src), ctx); local
946 emit(A64_UDIV(is64, dst, dst, src), ctx); local
948 emit(A64_SDIV(is64, dst, dst, src), ctx); local
953 emit(A64_UDIV(is64, tmp, dst, src), ctx); local
955 emit(A64_SDIV(is64, tmp, dst, src), ctx); local
956 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx); local
960 emit(A64_LSLV(is64, dst, dst, src), ctx); local
964 emit(A64_LSRV(is64, dst, dst, src), ctx); local
968 emit(A64_ASRV(is64, dst, dst, src), ctx); local
973 emit(A64_NEG(is64, dst, dst), ctx); local
988 emit(A64_REV16(is64, dst, dst), ctx); local
990 emit(A64_UXTH(is64, dst, dst), ctx); local
997 emit(A64_REV64(dst, dst), ctx); local
1005 emit(A64_UXTH(is64, dst, dst), ctx); local
1009 emit(A64_UXTW(is64, dst, dst), ctx); local
1025 emit(A64_ADD_I(is64, dst, dst, imm), ctx); local
1030 emit(A64_ADD(is64, dst, dst, tmp), ctx); local
1036 emit(A64_SUB_I(is64, dst, dst, imm), ctx); local
1041 emit(A64_SUB(is64, dst, dst, tmp), ctx); local
1051 emit(A64_AND(is64, dst, dst, tmp), ctx); local
1061 emit(A64_ORR(is64, dst, dst, tmp), ctx); local
1071 emit(A64_EOR(is64, dst, dst, tmp), ctx); local
1077 emit(A64_MUL(is64, dst, dst, tmp), ctx); local
1083 emit(A64_UDIV(is64, dst, dst, tmp), ctx); local
1085 emit(A64_SDIV(is64, dst, dst, tmp), ctx); local
1091 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx); local
1093 emit(A64_SDIV(is64, tmp, dst, tmp2), ctx); local
1094 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx); local
1098 emit(A64_LSL(is64, dst, dst, imm), ctx); local
1102 emit(A64_LSR(is64, dst, dst, imm), ctx); local
1106 emit(A64_ASR(is64, dst, dst, imm), ctx); local
1117 emit(A64_B(jmp_offset), ctx); local
1140 emit(A64_CMP(is64, dst, src), ctx); local
1179 emit(A64_B_(jmp_cond, jmp_offset), ctx); local
1183 emit(A64_TST(is64, dst, src), ctx); local
1207 emit(A64_CMP_I(is64, dst, imm), ctx); local
1212 emit(A64_CMP(is64, dst, tmp), ctx); local
1222 emit(A64_TST(is64, dst, tmp), ctx); local
1237 emit(A64_MRS_SP_EL0(tmp), ctx); local
1239 emit(A64_LDR32I(r0, tmp, cpu_offset), ctx); local
1242 emit(A64_LDR32(r0, tmp, tmp2), ctx); local
1268 emit(A64_B(jmp_offset), ctx); local
1323 emit(A64_LDRSWI(dst, src_adj, off_adj), ctx); local
1325 emit(A64_LDR32I(dst, src_adj, off_adj), ctx); local
1329 emit(A64_LDRSW(dst, src, tmp), ctx); local
1331 emit(A64_LDR32(dst, src, tmp), ctx); local
1337 emit(A64_LDRSHI(dst, src_adj, off_adj), ctx); local
1339 emit(A64_LDRHI(dst, src_adj, off_adj), ctx); local
1343 emit(A64_LDRSH(dst, src, tmp), ctx); local
1345 emit(A64_LDRH(dst, src, tmp), ctx); local
1351 emit(A64_LDRSBI(dst, src_adj, off_adj), ctx); local
1353 emit(A64_LDRBI(dst, src_adj, off_adj), ctx); local
1357 emit(A64_LDRSB(dst, src, tmp), ctx); local
1359 emit(A64_LDRB(dst, src, tmp), ctx); local
1364 emit(A64_LDR64I(dst, src_adj, off_adj), ctx); local
1367 emit(A64_LDR64(dst, src, tmp), ctx); local
1415 emit(A64_STR32I(tmp, dst_adj, off_adj), ctx); local
1418 emit(A64_STR32(tmp, dst, tmp2), ctx); local
1423 emit(A64_STRHI(tmp, dst_adj, off_adj), ctx); local
1426 emit(A64_STRH(tmp, dst, tmp2), ctx); local
1431 emit(A64_STRBI(tmp, dst_adj, off_adj), ctx); local
1434 emit(A64_STRB(tmp, dst, tmp2), ctx); local
1439 emit(A64_STR64I(tmp, dst_adj, off_adj), ctx); local
1442 emit(A64_STR64(tmp, dst, tmp2), ctx); local
1475 emit(A64_STR32I(src, dst_adj, off_adj), ctx); local
1478 emit(A64_STR32(src, dst, tmp), ctx); local
1483 emit(A64_STRHI(src, dst_adj, off_adj), ctx); local
1486 emit(A64_STRH(src, dst, tmp), ctx); local
1491 emit(A64_STRBI(src, dst_adj, off_adj), ctx); local
1494 emit(A64_STRB(src, dst, tmp), ctx); local
1499 emit(A64_STR64I(src, dst_adj, off_adj), ctx); local
1502 emit(A64_STR64(src, dst, tmp), ctx); local
1609 build_body(struct jit_ctx *ctx, bool extra_pass) argument
1650 validate_code(struct jit_ctx *ctx) argument
1663 validate_ctx(struct jit_ctx *ctx) argument
1683 struct jit_ctx ctx; member in struct:arm64_jit_data
1696 struct jit_ctx ctx; local
1906 invoke_bpf_prog(struct jit_ctx *ctx, struct bpf_tramp_link *l, int args_off, int retval_off, int run_ctx_off, bool save_ret) argument
1923 emit_a64_mov_i64(A64_R(10), l->cookie, ctx); local
1925 ctx); local
1931 emit_addr_mov_i64(A64_R(19), (const u64)p, ctx); local
1951 emit_addr_mov_i64(A64_R(1), (const u64)p->insnsi, ctx); local
1973 invoke_bpf_mod_ret(struct jit_ctx *ctx, struct bpf_tramp_links *tl, int args_off, int retval_off, int run_ctx_off, __le32 **branches) argument
1982 emit(A64_STR64I(A64_ZR, A64_SP, retval_off), ctx); local
1998 save_args(struct jit_ctx *ctx, int args_off, int nregs) argument
2003 emit(A64_STR64I(i, A64_SP, args_off), ctx); local
2008 restore_args(struct jit_ctx *ctx, int args_off, int nregs) argument
2013 emit(A64_LDR64I(i, A64_SP, args_off), ctx); local
2029 prepare_trampoline(struct jit_ctx *ctx, struct bpf_tramp_image *im, struct bpf_tramp_links *tlinks, void *func_addr, int nregs, u32 flags) argument
2116 emit(A64_PUSH(A64_FP, A64_R(9), A64_SP), ctx); local
2120 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); local
2128 emit_addr_mov_i64(A64_R(10), (const u64)func_addr, ctx); local
2144 emit_addr_mov_i64(A64_R(0), (const u64)im, ctx); local
2167 emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx); local
2188 emit_addr_mov_i64(A64_R(0), (const u64)im, ctx); local
2206 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); local
2207 emit(A64_POP(A64_FP, A64_R(9), A64_SP), ctx); local
2243 struct jit_ctx ctx = { local
2293 struct jit_ctx ctx = { local
/linux-master/arch/x86/crypto/
twofish.h
10 asmlinkage void twofish_enc_blk(const void *ctx, u8 *dst, const u8 *src);
11 asmlinkage void twofish_dec_blk(const void *ctx, u8 *dst, const u8 *src);
14 asmlinkage void __twofish_enc_blk_3way(const void *ctx, u8 *dst, const u8 *src,
16 asmlinkage void twofish_dec_blk_3way(const void *ctx, u8 *dst, const u8 *src);
19 extern void twofish_dec_blk_cbc_3way(const void *ctx, u8 *dst, const u8 *src);
/linux-master/tools/testing/selftests/bpf/progs/
test_xdp_with_cpumap_helpers.c
16 int xdp_redir_prog(struct xdp_md *ctx) argument
22 int xdp_dummy_prog(struct xdp_md *ctx) argument
28 int xdp_dummy_cm(struct xdp_md *ctx) argument
30 if (ctx->ingress_ifindex == IFINDEX_LO)
37 int xdp_dummy_cm_frags(struct xdp_md *ctx) argument
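The cpumap test programs above are ordinary XDP programs keyed off fields of struct xdp_md. A minimal standalone XDP program in the same style, assuming libbpf's bpf_helpers.h; xdp_pass_lo_only is an illustrative name and IFINDEX_LO is hardcoded to 1 purely for illustration:

// SPDX-License-Identifier: GPL-2.0
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

#define IFINDEX_LO 1	/* loopback ifindex on most systems; an assumption here */

SEC("xdp")
int xdp_pass_lo_only(struct xdp_md *ctx)
{
	/* Drop anything that did not arrive on loopback, pass the rest. */
	if (ctx->ingress_ifindex != IFINDEX_LO)
		return XDP_DROP;
	return XDP_PASS;
}

char _license[] SEC("license") = "GPL";
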
test_attach_probe_manual.c
18 int handle_kprobe(struct pt_regs *ctx) argument
25 int handle_kretprobe(struct pt_regs *ctx) argument
32 int handle_uprobe(struct pt_regs *ctx) argument
39 int handle_uretprobe(struct pt_regs *ctx) argument
46 int handle_uprobe_byname(struct pt_regs *ctx) argument
/linux-master/drivers/hid/bpf/progs/
HP__Elite-Presenter.bpf.c
49 int probe(struct hid_bpf_probe_args *ctx) argument
51 ctx->retval = ctx->rdesc_size != 264;
52 if (ctx->retval)
53 ctx->retval = -EINVAL;
/linux-master/drivers/crypto/caam/
caamrng.c
118 static void caam_rng_fill_async(struct caam_rng_ctx *ctx) argument
125 nents = kfifo_dma_in_prepare(&ctx->fifo, sg, ARRAY_SIZE(sg),
130 len = caam_rng_read_one(ctx->jrdev, sg_virt(&sg[0]),
132 ctx->desc_async,
137 kfifo_dma_in_finish(&ctx->fifo, len);
142 struct caam_rng_ctx *ctx = container_of(work, struct caam_rng_ctx, local
144 caam_rng_fill_async(ctx);
149 struct caam_rng_ctx *ctx = to_caam_rng_ctx(rng); local
155 return caam_rng_read_one(ctx->jrdev, dst, max,
156 ctx
168 struct caam_rng_ctx *ctx = to_caam_rng_ctx(rng); local
180 struct caam_rng_ctx *ctx = to_caam_rng_ctx(rng); local
221 struct caam_rng_ctx *ctx = to_caam_rng_ctx(rng); local
267 struct caam_rng_ctx *ctx; local
