Searched refs:context (Results 51 - 75 of 2081) sorted by relevance


/linux-master/drivers/gpu/drm/tegra/
H A Dsubmit.c26 #define SUBMIT_ERR(context, fmt, ...) \
27 dev_err_ratelimited(context->client->base.dev, \
146 tegra_drm_mapping_get(struct tegra_drm_context *context, u32 id) argument
150 xa_lock(&context->mappings);
152 mapping = xa_load(&context->mappings, id);
156 xa_unlock(&context->mappings);
180 struct tegra_drm_context *context,
187 SUBMIT_ERR(context, "gather_data_words cannot be zero");
192 SUBMIT_ERR(context, "gather_data_words is too large");
198 SUBMIT_ERR(context, "faile
179 submit_copy_gather_data(struct gather_bo **pbo, struct device *dev, struct tegra_drm_context *context, struct drm_tegra_channel_submit *args) argument
228 submit_write_reloc(struct tegra_drm_context *context, struct gather_bo *bo, struct drm_tegra_submit_buf *buf, struct tegra_drm_mapping *mapping) argument
257 submit_process_bufs(struct tegra_drm_context *context, struct gather_bo *bo, struct drm_tegra_channel_submit *args, struct tegra_drm_submit_data *job_data) argument
327 submit_get_syncpt(struct tegra_drm_context *context, struct host1x_job *job, struct xarray *syncpoints, struct drm_tegra_channel_submit *args) argument
350 submit_job_add_gather(struct host1x_job *job, struct tegra_drm_context *context, struct drm_tegra_submit_cmd_gather_uptr *cmd, struct gather_bo *bo, u32 *offset, struct tegra_drm_submit_data *job_data, u32 *class) argument
393 submit_create_job(struct tegra_drm_context *context, struct gather_bo *bo, struct drm_tegra_channel_submit *args, struct tegra_drm_submit_data *job_data, struct xarray *syncpoints) argument
516 struct tegra_drm_context *context; local
[all...]
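The tegra_drm_mapping_get() hits above show a recurring kernel pattern: look an object up by id while the container is locked and take a reference before the lock is released, so the object cannot disappear between lookup and use. The sketch below is a userspace analogue of that pattern, assuming a plain mutex-protected array and a hand-rolled refcount instead of the driver's xarray and kref; none of the names come from the tegra code.

```c
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

#define MAX_MAPPINGS 64

struct mapping {
	int refcount;           /* protected by table_lock */
	unsigned int iova;      /* example payload */
};

static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;
static struct mapping *mappings[MAX_MAPPINGS];

/* Look up a mapping by id and pin it while the lock is still held. */
static struct mapping *mapping_get(unsigned int id)
{
	struct mapping *m = NULL;

	pthread_mutex_lock(&table_lock);
	if (id < MAX_MAPPINGS && mappings[id]) {
		m = mappings[id];
		m->refcount++;          /* pin before dropping the lock */
	}
	pthread_mutex_unlock(&table_lock);
	return m;
}

static void mapping_put(struct mapping *m)
{
	int free_it;

	pthread_mutex_lock(&table_lock);
	free_it = (--m->refcount == 0);
	pthread_mutex_unlock(&table_lock);
	if (free_it)
		free(m);
}

int main(void)
{
	struct mapping *m = calloc(1, sizeof(*m));

	if (!m)
		return 1;
	m->refcount = 1;                /* reference held by the table */
	m->iova = 0x1000;
	mappings[7] = m;

	struct mapping *found = mapping_get(7);
	if (found)
		printf("id 7 -> iova 0x%x (refcount %d)\n",
		       found->iova, found->refcount);
	mapping_put(found);             /* drop the lookup reference */
	return 0;
}
```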
/linux-master/drivers/gpu/drm/amd/display/dc/hwss/dcn32/
H A Ddcn32_hwseq.h48 void dcn32_commit_subvp_config(struct dc *dc, struct dc_state *context);
66 void dcn32_program_mall_pipe_config(struct dc *dc, struct dc_state *context);
68 void dcn32_update_mall_sel(struct dc *dc, struct dc_state *context);
70 void dcn32_update_force_pstate(struct dc *dc, struct dc_state *context);
72 void dcn32_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx);
78 void dcn32_resync_fifo_dccg_dio(struct dce_hwseq *hws, struct dc *dc, struct dc_state *context);
81 struct dc_state *context,
99 struct dc_state *context,
109 struct dc_state *context,
112 void dcn32_enable_phantom_streams(struct dc *dc, struct dc_state *context);
[all...]
/linux-master/fs/xfs/
H A Dxfs_xattr.c201 struct xfs_attr_list_context *context,
210 if (context->count < 0 || context->seen_enough)
213 if (!context->buffer)
216 arraytop = context->count + prefix_len + namelen + 1;
217 if (arraytop > context->firstu) {
218 context->count = -1; /* insufficient space */
219 context->seen_enough = 1;
222 offset = context->buffer + context
200 __xfs_xattr_put_listent( struct xfs_attr_list_context *context, char *prefix, int prefix_len, unsigned char *name, int namelen) argument
235 xfs_xattr_put_listent( struct xfs_attr_list_context *context, int flags, unsigned char *name, int namelen, int valuelen) argument
296 struct xfs_attr_list_context context; local
[all...]
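The __xfs_xattr_put_listent() snippet above is the classic "fill a caller-sized buffer and flag overflow" helper: it checks whether the next prefix+name entry still fits (arraytop vs. firstu) and, if not, records insufficient space by setting count to -1 and seen_enough. Here is a simplified userspace sketch of the same bookkeeping; the context struct is a stand-in for struct xfs_attr_list_context, and the meaning of firstu is reduced to "bytes available".

```c
#include <stdio.h>
#include <string.h>

struct listent_ctx {
	char *buffer;        /* output buffer; NULL means "size query only" */
	int count;           /* bytes used so far, -1 once we overflow */
	int firstu;          /* bytes available in buffer */
	int seen_enough;     /* stop flag */
};

static void put_listent(struct listent_ctx *ctx, const char *prefix,
			const char *name)
{
	int prefix_len = (int)strlen(prefix);
	int namelen = (int)strlen(name);
	int arraytop;

	if (ctx->count < 0 || ctx->seen_enough)
		return;                           /* already overflowed/done */

	if (!ctx->buffer) {
		ctx->count += prefix_len + namelen + 1;   /* size-only pass */
		return;
	}

	arraytop = ctx->count + prefix_len + namelen + 1;
	if (arraytop > ctx->firstu) {
		ctx->count = -1;                  /* insufficient space */
		ctx->seen_enough = 1;
		return;
	}

	memcpy(ctx->buffer + ctx->count, prefix, prefix_len);
	memcpy(ctx->buffer + ctx->count + prefix_len, name, namelen);
	ctx->buffer[arraytop - 1] = '\0';
	ctx->count = arraytop;
}

int main(void)
{
	char buf[32];
	struct listent_ctx ctx = { .buffer = buf, .firstu = sizeof(buf) };

	put_listent(&ctx, "user.", "comment");
	put_listent(&ctx, "security.", "selinux");
	printf("used %d of %d bytes\n", ctx.count, ctx.firstu);
	return 0;
}
```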
/linux-master/drivers/infiniband/hw/hns/
H A Dhns_roce_cmd.c93 struct hns_roce_cmd_context *context = local
94 &hr_dev->cmd.context[token % hr_dev->cmd.max_cmds];
96 if (unlikely(token != context->token)) {
98 "[cmd] invalid ae token 0x%x, context token is 0x%x.\n",
99 token, context->token);
103 context->result = (status == HNS_ROCE_CMD_SUCCESS) ? 0 : (-EIO);
104 context->out_param = out_param;
105 complete(&context->done);
113 struct hns_roce_cmd_context *context; local
120 context
[all...]
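The hns_roce_cmd.c hit shows how an asynchronous command completion finds its waiter: the event handler indexes a fixed table by token % max_cmds, double-checks that the stored token matches (to reject stale or corrupted events), records the result, and completes the waiter. The userspace sketch below mirrors that flow with a condition variable standing in for struct completion; the table size, error value and names are illustrative.

```c
#include <pthread.h>
#include <stdio.h>

#define MAX_CMDS 8

struct cmd_context {
	unsigned int token;     /* token of the command using this slot */
	int result;             /* 0 on success, negative error otherwise */
	int done;
	pthread_mutex_t lock;
	pthread_cond_t cond;
};

static struct cmd_context cmd_table[MAX_CMDS];

/* Called from the "event" side when a command completes. */
static void cmd_event(unsigned int token, int status)
{
	struct cmd_context *ctx = &cmd_table[token % MAX_CMDS];

	pthread_mutex_lock(&ctx->lock);
	if (token != ctx->token) {
		fprintf(stderr, "invalid token 0x%x, slot token 0x%x\n",
			token, ctx->token);
		pthread_mutex_unlock(&ctx->lock);
		return;
	}
	ctx->result = status ? -5 /* -EIO */ : 0;
	ctx->done = 1;
	pthread_cond_signal(&ctx->cond);
	pthread_mutex_unlock(&ctx->lock);
}

/* Called from the submitting side after posting the command. */
static int cmd_wait(unsigned int token)
{
	struct cmd_context *ctx = &cmd_table[token % MAX_CMDS];

	pthread_mutex_lock(&ctx->lock);
	while (!ctx->done)
		pthread_cond_wait(&ctx->cond, &ctx->lock);
	pthread_mutex_unlock(&ctx->lock);
	return ctx->result;
}

int main(void)
{
	struct cmd_context *ctx = &cmd_table[3 % MAX_CMDS];

	pthread_mutex_init(&ctx->lock, NULL);
	pthread_cond_init(&ctx->cond, NULL);
	ctx->token = 3;

	cmd_event(3, 0);                       /* simulate the completion */
	printf("command result: %d\n", cmd_wait(3));
	return 0;
}
```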
/linux-master/drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/
H A Ddce110_clk_mgr.c92 uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context) argument
97 for (j = 0; j < context->stream_count; j++) {
98 struct dc_stream_state *stream = context->streams[j];
120 const struct dc_state *context,
126 for (j = 0; j < context->stream_count; j++) {
129 const struct dc_stream_state *stream = context->streams[j];
135 if (stream == context->res_ctx.pipe_ctx[k].stream) {
136 pipe_ctx = &context->res_ctx.pipe_ctx[k];
174 struct dc_state *context)
176 struct dm_pp_display_configuration *pp_display_cfg = &context
119 dce110_fill_display_configs( const struct dc_state *context, struct dm_pp_display_configuration *pp_display_cfg) argument
172 dce11_pplib_apply_display_requirements( struct dc *dc, struct dc_state *context) argument
249 dce11_update_clocks(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool safe_to_lower) argument
[all...]
/linux-master/drivers/gpu/drm/amd/display/dc/dml/dcn20/
H A Ddcn20_fpu.h36 struct dc_state *context,
40 struct dc_state *context,
45 struct dc_state *context,
49 struct dc_state *context,
64 bool dcn20_validate_bandwidth_fp(struct dc *dc, struct dc_state *context,
76 struct dc_state *context,
79 bool dcn21_validate_bandwidth_fp(struct dc *dc, struct dc_state *context, bool
/linux-master/drivers/md/dm-vdo/
H A Daction-manager.h14 * as the block map or slab depot). Each action manager is tied to a specific context for which it
15 * manages actions. The manager ensures that only one action is active on that context at a time,
38 * @context: The object which holds the per-zone context for the action
42 typedef void (*vdo_zone_action_fn)(void *context, zone_count_t zone_number,
48 * @context: The object which holds the per-zone context for the action
51 typedef void (*vdo_action_preamble_fn)(void *context, struct vdo_completion *parent);
56 * @context: The object which holds the per-zone context fo
[all...]
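The action-manager.h comments above describe the two callback shapes an action is built from: an optional preamble that runs once, and a per-zone action applied to each zone of the managed context, with only one action active on that context at a time. Below is a minimal userspace sketch of those shapes, using a hypothetical "depot" context and plain synchronous calls in place of VDO completions.

```c
#include <stdio.h>

typedef unsigned char zone_count_t;

/* Per-zone action: applied to each zone of the context in turn. */
typedef void (*zone_action_fn)(void *context, zone_count_t zone_number);

/* Preamble: runs once before the per-zone actions. */
typedef void (*action_preamble_fn)(void *context);

struct depot {                          /* hypothetical managed context */
	zone_count_t zone_count;
	int flushed[8];
};

static void flush_preamble(void *context)
{
	struct depot *depot = context;

	printf("preamble: preparing %d zones\n", depot->zone_count);
}

static void flush_zone(void *context, zone_count_t zone)
{
	struct depot *depot = context;

	depot->flushed[zone] = 1;
	printf("flushed zone %d\n", zone);
}

/* One action at a time on this context: preamble first, then each zone. */
static void apply_action(void *context, zone_count_t zones,
			 action_preamble_fn preamble, zone_action_fn action)
{
	if (preamble)
		preamble(context);
	for (zone_count_t z = 0; z < zones; z++)
		action(context, z);
}

int main(void)
{
	struct depot depot = { .zone_count = 3 };

	apply_action(&depot, depot.zone_count, flush_preamble, flush_zone);
	return 0;
}
```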
/linux-master/drivers/gpu/drm/amd/display/dc/dml/dcn30/
H A Ddcn30_fpu.c365 void dcn30_fpu_update_soc_for_wm_a(struct dc *dc, struct dc_state *context) argument
371 if (!context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching ||
372 context->bw_ctx.dml.soc.dram_clock_change_latency_us == 0)
373 context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.pstate_latency_us;
374 context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.sr_enter_plus_exit_time_us;
375 context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.sr_exit_time_us;
380 struct dc *dc, struct dc_state *context,
385 int maxMpcComb = context->bw_ctx.dml.vba.maxMpcComb;
387 double dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][maxMpcComb];
388 bool pstate_en = context
379 dcn30_fpu_calculate_wm_and_dlg( struct dc *dc, struct dc_state *context, display_e2e_pipe_params_st *pipes, int pipe_cnt, int vlevel) argument
691 dcn30_find_dummy_latency_index_for_fw_based_mclk_switch(struct dc *dc, struct dc_state *context, display_e2e_pipe_params_st *pipes, int pipe_cnt, int vlevel) argument
[all...]
/linux-master/arch/microblaze/include/asm/
H A Dmmu_context_mm.h22 * segment IDs). We use a skew on both the context and the high 4 bits
41 * Set the current MMU context.
50 extern void set_context(mm_context_t context, pgd_t *pgd);
59 * This caches the next context number that we expect to be free.
60 * Its use is an optimization only, we can't rely on this context
75 * Get a new mmu context for the address space described by `mm'.
81 if (mm->context != NO_CONTEXT)
92 mm->context = ctx;
97 * Set up the context for a new address space.
99 # define init_new_context(tsk, mm) (((mm)->context
[all...]
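The mmu_context_mm.h comments above explain the scheme: each address space carries a context number, NO_CONTEXT until one is assigned, and get_mmu_context() hands out the next number that is expected to be free. The sketch below is a userspace reduction of that idea with a hypothetical 16-entry context space; context stealing when the pool is exhausted, which the real code must handle, is omitted.

```c
#include <stdio.h>

#define NO_CONTEXT   (-1)
#define LAST_CONTEXT 15          /* pretend the hardware has 16 contexts */

struct mm {
	int context;
};

static int next_context;         /* cached next number expected to be free */

static void get_mmu_context(struct mm *mm)
{
	if (mm->context != NO_CONTEXT)
		return;                          /* already has a context */

	mm->context = next_context;
	next_context = (next_context + 1) & LAST_CONTEXT;    /* wrap */
}

int main(void)
{
	struct mm a = { NO_CONTEXT }, b = { NO_CONTEXT };

	get_mmu_context(&a);
	get_mmu_context(&b);
	get_mmu_context(&a);          /* no-op: a already has a context */
	printf("a=%d b=%d next=%d\n", a.context, b.context, next_context);
	return 0;
}
```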
/linux-master/drivers/infiniband/hw/mlx4/
H A Ddoorbell.c50 struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context( local
53 mutex_lock(&context->db_page_mutex);
55 list_for_each_entry(page, &context->db_page_list, list)
67 page->umem = ib_umem_get(context->ibucontext.device, virt & PAGE_MASK,
75 list_add(&page->list, &context->db_page_list);
84 mutex_unlock(&context->db_page_mutex);
89 void mlx4_ib_db_unmap_user(struct mlx4_ib_ucontext *context, struct mlx4_db *db) argument
91 mutex_lock(&context->db_page_mutex);
99 mutex_unlock(&context->db_page_mutex);
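mlx4_ib_db_map_user() above walks the per-ucontext db_page_list under db_page_mutex looking for an already-pinned page that covers virt & PAGE_MASK; only the first doorbell on a page pins it (via ib_umem_get()), later ones just share the entry. Here is a userspace analogue of that share-by-page lookup, with a linked list and a counter standing in for the umem pinning; the mlx5 hit below follows the same pattern.

```c
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

struct db_page {
	unsigned long user_virt;      /* page-aligned user address */
	int refcnt;
	struct db_page *next;
};

static pthread_mutex_t db_page_mutex = PTHREAD_MUTEX_INITIALIZER;
static struct db_page *db_page_list;

static struct db_page *db_map_user(unsigned long virt)
{
	struct db_page *page;

	pthread_mutex_lock(&db_page_mutex);

	for (page = db_page_list; page; page = page->next)
		if (page->user_virt == (virt & PAGE_MASK))
			goto found;

	page = calloc(1, sizeof(*page));          /* first user: "pin" it */
	if (!page)
		goto out;
	page->user_virt = virt & PAGE_MASK;
	page->next = db_page_list;
	db_page_list = page;
found:
	page->refcnt++;
out:
	pthread_mutex_unlock(&db_page_mutex);
	return page;
}

int main(void)
{
	/* Two doorbells on the same 4 KiB page share one entry. */
	struct db_page *a = db_map_user(0x7f0000001000UL + 0x40);
	struct db_page *b = db_map_user(0x7f0000001000UL + 0x80);

	if (!a || !b)
		return 1;
	printf("same page: %s, refcnt=%d\n", a == b ? "yes" : "no", a->refcnt);
	return 0;
}
```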
/linux-master/drivers/infiniband/hw/mlx5/
H A Ddoorbell.c48 int mlx5_ib_db_map_user(struct mlx5_ib_ucontext *context, unsigned long virt, argument
54 mutex_lock(&context->db_page_mutex);
56 list_for_each_entry(page, &context->db_page_list, list)
69 page->umem = ib_umem_get(context->ibucontext.device, virt & PAGE_MASK,
79 list_add(&page->list, &context->db_page_list);
88 mutex_unlock(&context->db_page_mutex);
93 void mlx5_ib_db_unmap_user(struct mlx5_ib_ucontext *context, struct mlx5_db *db) argument
95 mutex_lock(&context->db_page_mutex);
104 mutex_unlock(&context->db_page_mutex);
/linux-master/drivers/gpu/drm/amd/display/dc/resource/dcn30/
H A Ddcn30_resource.h50 struct dc_state *context,
59 bool dcn30_validate_bandwidth(struct dc *dc, struct dc_state *context,
63 struct dc_state *context,
70 struct dc *dc, struct dc_state *context,
74 void dcn30_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
79 struct dc *dc, struct dc_state *context,
103 bool dcn30_can_support_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context);
104 void dcn30_setup_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context);
105 int dcn30_find_dummy_latency_index_for_fw_based_mclk_switch(struct dc *dc, struct dc_state *context,
/linux-master/arch/powerpc/mm/book3s64/
H A Dmmu_context.c3 * MMU context allocation for 64-bit kernels.
39 WARN(result != id, "mmu: Failed to reserve context id %d (rc %d)\n", id, result);
99 mm->context.hash_context = kmalloc(sizeof(struct hash_mm_context),
101 if (!mm->context.hash_context)
110 * explicitly against context.id == 0. This ensures that we properly
111 * initialize context slice details for newly allocated mm's (which will
112 * have id == 0) and don't alter context slice inherited via fork (which
118 if (mm->context.id == 0) {
119 memset(mm->context.hash_context, 0, sizeof(struct hash_mm_context));
123 memcpy(mm->context
[all...]
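The mmu_context.c comment above spells out why init_new_context() tests context.id == 0: a brand-new mm (id 0) needs its hash/slice state initialized from scratch, while an mm arriving via fork already carries the state copied from its parent and must not have it wiped before it gets its own id. Below is a small userspace sketch of just that decision, with a trivial counter in place of the kernel's context-id allocator and a single field standing in for struct hash_mm_context.

```c
#include <stdio.h>

struct mm {
	int id;                      /* 0 == never had a context id */
	unsigned long slice_mask;    /* stand-in for slice bookkeeping */
};

static int next_id = 1;

static void init_new_context(struct mm *mm)
{
	/*
	 * A brand-new mm has id == 0: start the slice details from
	 * scratch.  A forked mm still holds its parent's id and slice
	 * details at this point, so leave them untouched.
	 */
	if (mm->id == 0)
		mm->slice_mask = 0;

	mm->id = next_id++;          /* now give it its own context id */
}

int main(void)
{
	struct mm parent = { .id = 0 };

	init_new_context(&parent);           /* fresh mm: slices cleared */
	parent.slice_mask = 0xf0;            /* parent sets up some slices */

	struct mm child = parent;            /* fork: copy of the parent */
	init_new_context(&child);            /* keeps inherited slices */

	printf("parent id=%d child id=%d child slices=0x%lx\n",
	       parent.id, child.id, child.slice_mask);
	return 0;
}
```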
/linux-master/include/sound/
H A Dtas2781-dsp.h172 void tasdevice_select_cfg_blk(void *context, int conf_no,
174 void tasdevice_config_info_remove(void *context);
175 void tasdevice_dsp_remove(void *context);
176 int tasdevice_dsp_parser(void *context);
177 int tasdevice_rca_parser(void *context, const struct firmware *fmw);
178 void tasdevice_dsp_remove(void *context);
179 void tasdevice_calbin_remove(void *context);
180 int tasdevice_select_tuningprm_cfg(void *context, int prm,
182 int tasdevice_prmg_load(void *context, int prm_no);
183 int tasdevice_prmg_calibdata_load(void *context, in
[all...]
/linux-master/tools/testing/selftests/bpf/progs/
H A Duser_ringbuf_fail.c30 bad_access1(struct bpf_dynptr *dynptr, void *context) argument
53 bad_access2(struct bpf_dynptr *dynptr, void *context) argument
76 write_forbidden(struct bpf_dynptr *dynptr, void *context) argument
96 null_context_write(struct bpf_dynptr *dynptr, void *context) argument
98 *((__u64 *)context) = 0;
116 null_context_read(struct bpf_dynptr *dynptr, void *context) argument
118 __u64 id = *((__u64 *)context);
138 try_discard_dynptr(struct bpf_dynptr *dynptr, void *context) argument
158 try_submit_dynptr(struct bpf_dynptr *dynptr, void *context) argument
178 invalid_drain_callback_return(struct bpf_dynptr *dynptr, void *context) argument
196 try_reinit_dynptr_mem(struct bpf_dynptr *dynptr, void *context) argument
203 try_reinit_dynptr_ringbuf(struct bpf_dynptr *dynptr, void *context) argument
[all...]
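user_ringbuf_fail.c collects callbacks the verifier must reject: writing into the sample dynptr, dereferencing a NULL context, discarding or resubmitting the dynptr, and so on. For contrast, here is a hedged sketch of a well-formed bpf_user_ringbuf_drain() callback that only copies the sample out with bpf_dynptr_read() and never touches the context; the map name, section name, assumed 8-byte sample size and surrounding program are illustrative rather than taken from the selftests.

```c
#include "vmlinux.h"
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_USER_RINGBUF);
	__uint(max_entries, 4096);
} user_ringbuf SEC(".maps");

long samples_read;                       /* updated by the callback */

/* Return 0 to keep draining, 1 to stop early. */
static long read_sample(struct bpf_dynptr *dynptr, void *context)
{
	__u64 value = 0;

	/* Read-only access to the sample (assumes 8-byte samples). */
	bpf_dynptr_read(&value, sizeof(value), dynptr, 0, 0);
	samples_read++;
	return 0;
}

SEC("raw_tp/sys_enter")
int drain_samples(void *ctx)
{
	/* A NULL context is fine as long as the callback ignores it. */
	bpf_user_ringbuf_drain(&user_ringbuf, read_sample, NULL, 0);
	return 0;
}

char _license[] SEC("license") = "GPL";
```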
/linux-master/drivers/net/ethernet/qlogic/qed/
H A Dqed_nvmetcp_fw_funcs.c129 init_nvmetcp_task_params(struct e5_nvmetcp_task_context *context, argument
133 context->ystorm_st_context.state.cccid = task_params->host_cccid;
134 SET_FIELD(context->ustorm_st_context.error_flags, USTORM_NVMETCP_TASK_ST_CTX_NVME_TCP, 1);
135 context->ustorm_st_context.nvme_tcp_opaque_lo = cpu_to_le32(task_params->opq.lo);
136 context->ustorm_st_context.nvme_tcp_opaque_hi = cpu_to_le32(task_params->opq.hi);
145 struct e5_nvmetcp_task_context *context = task_params->context; local
146 const u8 val_byte = context->mstorm_ag_context.cdu_validation;
149 memset(context, 0, sizeof(*context));
206 set_local_completion_context(struct e5_nvmetcp_task_context *context) argument
221 struct e5_nvmetcp_task_context *context = task_params->context; local
327 struct e5_nvmetcp_task_context *context = task_params->context; local
[all...]
/linux-master/drivers/media/usb/as102/
H A Das10x_cmd_cfg.c16 * as10x_cmd_get_context - Send get context command to AS10x
18 * @tag: context tag
19 * @pvalue: pointer where to store context value read
34 sizeof(pcmd->body.context.req));
37 pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT);
38 pcmd->body.context.req.tag = cpu_to_le16(tag);
39 pcmd->body.context.req.type = cpu_to_le16(GET_CONTEXT_DATA);
45 sizeof(pcmd->body.context.req)
48 sizeof(prsp->body.context.rsp)
57 /* parse response: context comman
[all...]
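as10x_cmd_get_context() above follows the usual firmware command shape: fill a small request (proc_id, context tag, GET_CONTEXT_DATA), send exactly sizeof(req) bytes, expect sizeof(rsp) bytes back, then parse the context value out of the response. The userspace sketch below reproduces that round trip against a fake loopback transport; the structure layouts and constants are invented, and the cpu_to_le16() byte-order handling the driver performs is omitted.

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define CONTROL_PROC_CONTEXT 0x00fc   /* hypothetical opcode */
#define GET_CONTEXT_DATA     1

struct get_ctx_req {
	uint16_t proc_id;
	uint16_t tag;
	uint16_t type;
};

struct get_ctx_rsp {
	uint16_t proc_id;
	uint16_t error;
	uint32_t value;
};

/* Fake transport: pretend the device answers every context read with 42. */
static int send_and_receive(const void *req, size_t req_len,
			    void *rsp, size_t rsp_len)
{
	struct get_ctx_rsp answer = { CONTROL_PROC_CONTEXT, 0, 42 };

	(void)req;
	(void)req_len;
	memcpy(rsp, &answer,
	       rsp_len < sizeof(answer) ? rsp_len : sizeof(answer));
	return 0;
}

static int cmd_get_context(uint16_t tag, uint32_t *pvalue)
{
	struct get_ctx_req req = {
		.proc_id = CONTROL_PROC_CONTEXT,
		.tag = tag,
		.type = GET_CONTEXT_DATA,
	};
	struct get_ctx_rsp rsp;

	if (send_and_receive(&req, sizeof(req), &rsp, sizeof(rsp)) < 0)
		return -1;
	if (rsp.error)
		return -1;
	*pvalue = rsp.value;             /* parse the response */
	return 0;
}

int main(void)
{
	uint32_t value = 0;

	if (cmd_get_context(0x10, &value) == 0)
		printf("context 0x10 = %u\n", value);
	return 0;
}
```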
/linux-master/crypto/
H A Drsa_helper.c16 int rsa_get_n(void *context, size_t hdrlen, unsigned char tag, argument
19 struct rsa_key *key = context;
46 int rsa_get_e(void *context, size_t hdrlen, unsigned char tag, argument
49 struct rsa_key *key = context;
61 int rsa_get_d(void *context, size_t hdrlen, unsigned char tag, argument
64 struct rsa_key *key = context;
76 int rsa_get_p(void *context, size_t hdrlen, unsigned char tag, argument
79 struct rsa_key *key = context;
91 int rsa_get_q(void *context, size_t hdrlen, unsigned char tag, argument
94 struct rsa_key *key = context;
106 rsa_get_dp(void *context, size_t hdrlen, unsigned char tag, const void *value, size_t vlen) argument
121 rsa_get_dq(void *context, size_t hdrlen, unsigned char tag, const void *value, size_t vlen) argument
136 rsa_get_qinv(void *context, size_t hdrlen, unsigned char tag, const void *value, size_t vlen) argument
[all...]
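The rsa_helper.c callbacks all share one convention: the decoder hands each field to a callback together with an opaque context pointer, and the callback casts that context back to the struct rsa_key it is filling in. The sketch below shows the same convention driven by a toy one-byte-length TLV walker instead of the kernel's generated ASN.1 decoder; the parser, the two-field key layout and the sample bytes are made up for illustration.

```c
#include <stdio.h>
#include <stddef.h>

struct rsa_key {                     /* simplified: just n and e */
	const unsigned char *n;
	size_t n_sz;
	const unsigned char *e;
	size_t e_sz;
};

typedef int (*field_cb)(void *context, const unsigned char *value,
			size_t vlen);

static int rsa_get_n(void *context, const unsigned char *value, size_t vlen)
{
	struct rsa_key *key = context;

	if (!vlen)
		return -1;               /* reject an empty modulus */
	key->n = value;
	key->n_sz = vlen;
	return 0;
}

static int rsa_get_e(void *context, const unsigned char *value, size_t vlen)
{
	struct rsa_key *key = context;

	key->e = value;
	key->e_sz = vlen;
	return 0;
}

/* Walk [1-byte length][bytes] records, calling one callback per field. */
static int parse_fields(const unsigned char *buf, size_t len,
			const field_cb *cbs, size_t ncbs, void *context)
{
	size_t off = 0;

	for (size_t i = 0; i < ncbs && off < len; i++) {
		size_t vlen = buf[off++];

		if (off + vlen > len || cbs[i](context, buf + off, vlen))
			return -1;
		off += vlen;
	}
	return 0;
}

int main(void)
{
	static const unsigned char blob[] = {
		3, 0xc7, 0x2a, 0x55,      /* field 1: modulus n  */
		3, 0x01, 0x00, 0x01,      /* field 2: exponent e */
	};
	const field_cb cbs[] = { rsa_get_n, rsa_get_e };
	struct rsa_key key = { 0 };

	if (parse_fields(blob, sizeof(blob), cbs, 2, &key) == 0)
		printf("n is %zu bytes, e is %zu bytes\n", key.n_sz, key.e_sz);
	return 0;
}
```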
/linux-master/drivers/gpu/drm/amd/pm/powerplay/inc/
H A Dppinterrupt.h41 void *context; /* Pointer to callback function context */ member in struct:pp_interrupt_registration_info
/linux-master/arch/riscv/include/asm/
H A Dkvm_vcpu_fp.h18 void __kvm_riscv_fp_f_save(struct kvm_cpu_context *context);
19 void __kvm_riscv_fp_f_restore(struct kvm_cpu_context *context);
20 void __kvm_riscv_fp_d_save(struct kvm_cpu_context *context);
21 void __kvm_riscv_fp_d_restore(struct kvm_cpu_context *context);
/linux-master/drivers/gpu/drm/amd/display/dc/dml/dcn314/
H A Ddcn314_fpu.h36 int dcn314_populate_dml_pipes_from_context_fpu(struct dc *dc, struct dc_state *context,
/linux-master/drivers/gpu/drm/i915/
H A Di915_config.h15 u64 context);
/linux-master/include/linux/
H A Ddm-kcopyd.h65 void *context);
69 unsigned int flags, dm_kcopyd_notify_fn fn, void *context);
75 * It must not be called from interrupt context.
79 * It may be called from interrupt context.
83 dm_kcopyd_notify_fn fn, void *context);
88 unsigned int flags, dm_kcopyd_notify_fn fn, void *context);
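The dm-kcopyd.h excerpt shows the submission side of an asynchronous copy: the caller passes a dm_kcopyd_notify_fn plus a void *context, and that function is called when the copy finishes, possibly from interrupt context, so it must not block. The userspace sketch below imitates the callback-plus-context contract with a worker thread standing in for the kcopyd client; it is not the kernel API, just the shape of its completion path.

```c
#include <pthread.h>
#include <stdio.h>
#include <string.h>

typedef void (*notify_fn)(int read_err, unsigned long write_err,
			  void *context);

struct copy_job {
	const char *from;
	char *to;
	size_t len;
	notify_fn fn;
	void *context;
};

static void *copy_worker(void *arg)
{
	struct copy_job *job = arg;

	memcpy(job->to, job->from, job->len);      /* the "copy" */
	job->fn(0, 0, job->context);               /* report success */
	return NULL;
}

/* Submit an async copy; the notify fn runs when it completes. */
static void async_copy(struct copy_job *job, pthread_t *thread)
{
	pthread_create(thread, NULL, copy_worker, job);
}

static void copy_done(int read_err, unsigned long write_err, void *context)
{
	printf("copy of %s done (read_err=%d write_err=%lu)\n",
	       (char *)context, read_err, write_err);
}

int main(void)
{
	char dst[16] = "";
	struct copy_job job = {
		.from = "hello", .to = dst, .len = 6,
		.fn = copy_done, .context = "region 0",
	};
	pthread_t t;

	async_copy(&job, &t);
	pthread_join(t, NULL);
	printf("dst now holds \"%s\"\n", dst);
	return 0;
}
```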
/linux-master/drivers/gpu/drm/amd/display/dc/dml/dcn31/
H A Ddcn31_fpu.h37 void dcn31_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
38 void dcn315_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
41 struct dc *dc, struct dc_state *context,
55 struct dc_state *context,
/linux-master/arch/x86/include/asm/
H A Dinit.h9 void *context; /* context for alloc_pgt_page */ member in struct:x86_mapping_info

Completed in 223 milliseconds
