Searched refs:guc (Results 1 - 25 of 88) sorted by relevance


/linux-master/drivers/gpu/drm/i915/gt/uc/
intel_guc_rc.h
11 void intel_guc_rc_init_early(struct intel_guc *guc);
13 static inline bool intel_guc_rc_is_supported(struct intel_guc *guc) argument
15 return guc->rc_supported;
18 static inline bool intel_guc_rc_is_wanted(struct intel_guc *guc) argument
20 return guc->submission_selected && intel_guc_rc_is_supported(guc);
23 static inline bool intel_guc_rc_is_used(struct intel_guc *guc) argument
25 return intel_guc_submission_is_used(guc) && intel_guc_rc_is_wanted(guc);
28 int intel_guc_rc_enable(struct intel_guc *guc);
[all...]
intel_guc_rc.c
13 static bool __guc_rc_supported(struct intel_guc *guc) argument
16 return guc->submission_supported &&
17 GRAPHICS_VER(guc_to_i915(guc)) >= 12;
20 static bool __guc_rc_selected(struct intel_guc *guc) argument
22 if (!intel_guc_rc_is_supported(guc))
25 return guc->submission_selected;
28 void intel_guc_rc_init_early(struct intel_guc *guc) argument
30 guc->rc_supported = __guc_rc_supported(guc);
31 guc
34 guc_action_control_gucrc(struct intel_guc *guc, bool enable) argument
50 __guc_rc_control(struct intel_guc *guc, bool enable) argument
73 intel_guc_rc_enable(struct intel_guc *guc) argument
78 intel_guc_rc_disable(struct intel_guc *guc) argument
[all...]
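
Note: the intel_guc_rc snippets above show a three-level gate that recurs across the GuC code: a feature is "supported" if the hardware/firmware combination can do it, "wanted" only if it is supported and GuC submission was selected, and "used" only if submission is actually in use. Below is a minimal, standalone sketch of that gating pattern in plain user-space C; the struct and helper names only mirror the snippets, and the main() driver is purely illustrative.

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for struct intel_guc: just the gating flags. */
struct guc_state {
	bool submission_supported;
	bool submission_selected;
	bool rc_supported;
};

/* "supported": the hardware/firmware combination can do it. */
static bool rc_is_supported(const struct guc_state *guc)
{
	return guc->rc_supported;
}

/* "wanted": supported AND GuC submission was selected. */
static bool rc_is_wanted(const struct guc_state *guc)
{
	return guc->submission_selected && rc_is_supported(guc);
}

/* "used": submission is really in use AND the feature is wanted. */
static bool rc_is_used(const struct guc_state *guc, bool submission_is_used)
{
	return submission_is_used && rc_is_wanted(guc);
}

int main(void)
{
	struct guc_state guc = {
		.submission_supported = true,
		.submission_selected = true,
		.rc_supported = true,
	};

	printf("supported=%d wanted=%d used=%d\n",
	       rc_is_supported(&guc), rc_is_wanted(&guc),
	       rc_is_used(&guc, /* submission_is_used = */ true));
	return 0;
}
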
intel_guc_debugfs.c
19 struct intel_guc *guc = m->private; local
22 if (!intel_guc_is_supported(guc))
25 intel_guc_load_status(guc, &p);
27 intel_guc_log_info(&guc->log, &p);
29 if (!intel_guc_submission_is_used(guc))
32 intel_guc_ct_print_info(&guc->ct, &p);
33 intel_guc_submission_print_info(guc, &p);
34 intel_guc_ads_print_policy_info(guc, &p);
42 struct intel_guc *guc = m->private; local
45 if (!intel_guc_submission_is_used(guc))
56 struct intel_guc *guc = m->private; local
69 struct intel_guc *guc = (struct intel_guc *)data; local
76 struct intel_guc *guc = data; local
88 struct intel_guc *guc = data; local
104 struct intel_guc *guc = data; local
115 struct intel_guc *guc = data; local
132 intel_guc_debugfs_register(struct intel_guc *guc, struct dentry *root) argument
[all...]
intel_guc_ads.h
16 int intel_guc_ads_create(struct intel_guc *guc);
17 void intel_guc_ads_destroy(struct intel_guc *guc);
18 void intel_guc_ads_init_late(struct intel_guc *guc);
19 void intel_guc_ads_reset(struct intel_guc *guc);
20 void intel_guc_ads_print_policy_info(struct intel_guc *guc,
23 u32 intel_guc_engine_usage_offset(struct intel_guc *guc);
intel_guc_submission.h
16 void intel_guc_submission_init_early(struct intel_guc *guc);
17 int intel_guc_submission_init(struct intel_guc *guc);
18 int intel_guc_submission_enable(struct intel_guc *guc);
19 void intel_guc_submission_disable(struct intel_guc *guc);
20 void intel_guc_submission_fini(struct intel_guc *guc);
21 int intel_guc_preempt_work_create(struct intel_guc *guc);
22 void intel_guc_preempt_work_destroy(struct intel_guc *guc);
24 void intel_guc_submission_print_info(struct intel_guc *guc,
26 void intel_guc_submission_print_context_info(struct intel_guc *guc,
36 int intel_guc_wait_for_pending_msg(struct intel_guc *guc,
43 intel_guc_submission_is_supported(struct intel_guc *guc) argument
48 intel_guc_submission_is_wanted(struct intel_guc *guc) argument
53 intel_guc_submission_is_used(struct intel_guc *guc) argument
[all...]
intel_guc.h
97 void (*reset)(struct intel_guc *guc);
98 void (*enable)(struct intel_guc *guc);
99 void (*disable)(struct intel_guc *guc);
296 * @last_dead_guc_jiffies: timestamp of previous 'dead guc' occurrance
325 #define GUC_SUBMIT_VER(guc) MAKE_GUC_VER_STRUCT((guc)->submission_version)
326 #define GUC_FIRMWARE_VER(guc) MAKE_GUC_VER_STRUCT((guc)->fw.file_selected.ver)
334 inline int intel_guc_send(struct intel_guc *guc, const u32 *action, u32 len) argument
336 return intel_guc_ct_send(&guc
340 intel_guc_send_nb(struct intel_guc *guc, const u32 *action, u32 len, u32 g2h_len_dw) argument
348 intel_guc_send_and_receive(struct intel_guc *guc, const u32 *action, u32 len, u32 *response_buf, u32 response_buf_size) argument
355 intel_guc_send_busy_loop(struct intel_guc *guc, const u32 *action, u32 len, u32 g2h_len_dw, bool loop) argument
392 intel_guc_to_host_event_handler(struct intel_guc *guc) argument
414 intel_guc_ggtt_offset(struct intel_guc *guc, struct i915_vma *vma) argument
445 intel_guc_is_supported(struct intel_guc *guc) argument
450 intel_guc_is_wanted(struct intel_guc *guc) argument
455 intel_guc_is_used(struct intel_guc *guc) argument
461 intel_guc_is_fw_running(struct intel_guc *guc) argument
466 intel_guc_is_ready(struct intel_guc *guc) argument
471 intel_guc_reset_interrupts(struct intel_guc *guc) argument
476 intel_guc_enable_interrupts(struct intel_guc *guc) argument
481 intel_guc_disable_interrupts(struct intel_guc *guc) argument
486 intel_guc_sanitize(struct intel_guc *guc) argument
496 intel_guc_enable_msg(struct intel_guc *guc, u32 mask) argument
503 intel_guc_disable_msg(struct intel_guc *guc, u32 mask) argument
[all...]
intel_guc_fw.h
11 int intel_guc_fw_upload(struct intel_guc *guc);
intel_guc_debugfs.h
12 void intel_guc_debugfs_register(struct intel_guc *guc, struct dentry *root);
intel_guc.c
42 void intel_guc_notify(struct intel_guc *guc) argument
44 struct intel_gt *gt = guc_to_gt(guc);
52 intel_uncore_write(gt->uncore, guc->notify_reg, GUC_SEND_TRIGGER);
55 static inline i915_reg_t guc_send_reg(struct intel_guc *guc, u32 i) argument
57 GEM_BUG_ON(!guc->send_regs.base);
58 GEM_BUG_ON(!guc->send_regs.count);
59 GEM_BUG_ON(i >= guc->send_regs.count);
61 return _MMIO(guc->send_regs.base + 4 * i);
64 void intel_guc_init_send_regs(struct intel_guc *guc) argument
66 struct intel_gt *gt = guc_to_gt(guc);
81 gen9_reset_guc_interrupts(struct intel_guc *guc) argument
92 gen9_enable_guc_interrupts(struct intel_guc *guc) argument
107 gen9_disable_guc_interrupts(struct intel_guc *guc) argument
132 gen11_reset_guc_interrupts(struct intel_guc *guc) argument
141 gen11_enable_guc_interrupts(struct intel_guc *guc) argument
152 gen11_disable_guc_interrupts(struct intel_guc *guc) argument
164 struct intel_guc *guc = container_of(w, struct intel_guc, dead_guc_worker); local
177 intel_guc_init_early(struct intel_guc *guc) argument
221 intel_guc_init_late(struct intel_guc *guc) argument
226 guc_ctl_debug_flags(struct intel_guc *guc) argument
240 guc_ctl_feature_flags(struct intel_guc *guc) argument
253 guc_ctl_log_params_flags(struct intel_guc *guc) argument
274 guc_ctl_ads_flags(struct intel_guc *guc) argument
282 guc_ctl_wa_flags(struct intel_guc *guc) argument
331 guc_ctl_devid(struct intel_guc *guc) argument
343 guc_init_params(struct intel_guc *guc) argument
366 intel_guc_write_params(struct intel_guc *guc) argument
386 intel_guc_dump_time_info(struct intel_guc *guc, struct drm_printer *p) argument
403 intel_guc_init(struct intel_guc *guc) argument
470 intel_guc_fini(struct intel_guc *guc) argument
494 intel_guc_send_mmio(struct intel_guc *guc, const u32 *request, u32 len, u32 *response_buf, u32 response_buf_size) argument
601 intel_guc_crash_process_msg(struct intel_guc *guc, u32 action) argument
615 intel_guc_to_host_process_recv_msg(struct intel_guc *guc, const u32 *payload, u32 len) argument
648 intel_guc_auth_huc(struct intel_guc *guc, u32 rsa_offset) argument
662 intel_guc_suspend(struct intel_guc *guc) argument
702 intel_guc_resume(struct intel_guc *guc) argument
760 intel_guc_allocate_vma(struct intel_guc *guc, u32 size) argument
817 intel_guc_allocate_and_map_vma(struct intel_guc *guc, u32 size, struct i915_vma **out_vma, void **out_vaddr) argument
841 __guc_action_self_cfg(struct intel_guc *guc, u16 key, u16 len, u64 value) argument
870 __guc_self_cfg(struct intel_guc *guc, u16 key, u16 len, u64 value) argument
880 intel_guc_self_cfg32(struct intel_guc *guc, u16 key, u32 value) argument
885 intel_guc_self_cfg64(struct intel_guc *guc, u16 key, u64 value) argument
897 intel_guc_load_status(struct intel_guc *guc, struct drm_printer *p) argument
934 intel_guc_write_barrier(struct intel_guc *guc) argument
[all...]
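
Note: guc_send_reg() above computes the MMIO address of scratch register i as send_regs.base + 4 * i (32-bit registers, so 4 bytes apart), guarded by GEM_BUG_ON() checks that the bank was configured and the index is in range. Below is a hedged user-space sketch of the same offset arithmetic, with assert() standing in for GEM_BUG_ON() and a made-up base address.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct send_regs {
	uint32_t base;   /* MMIO offset of the first scratch register */
	unsigned count;  /* number of consecutive 32-bit registers    */
};

/* Register i sits 4 bytes (one 32-bit register) past register i - 1. */
static uint32_t send_reg_offset(const struct send_regs *regs, unsigned i)
{
	assert(regs->base);       /* bank must have been configured  */
	assert(regs->count);
	assert(i < regs->count);  /* index must stay inside the bank */
	return regs->base + 4 * i;
}

int main(void)
{
	/* Hypothetical base and count, only to show the arithmetic. */
	struct send_regs regs = { .base = 0xc180, .count = 8 };

	for (unsigned i = 0; i < regs.count; i++)
		printf("scratch[%u] @ 0x%x\n", i, (unsigned)send_reg_offset(&regs, i));
	return 0;
}
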
intel_guc_ads.c
80 static u32 guc_ads_regset_size(struct intel_guc *guc) argument
82 GEM_BUG_ON(!guc->ads_regset_size);
83 return guc->ads_regset_size;
86 static u32 guc_ads_golden_ctxt_size(struct intel_guc *guc) argument
88 return PAGE_ALIGN(guc->ads_golden_ctxt_size);
91 static u32 guc_ads_capture_size(struct intel_guc *guc) argument
93 return PAGE_ALIGN(guc->ads_capture_size);
96 static u32 guc_ads_private_data_size(struct intel_guc *guc) argument
98 return PAGE_ALIGN(guc->fw.private_data_size);
101 static u32 guc_ads_regset_offset(struct intel_guc *guc) argument
106 guc_ads_golden_ctxt_offset(struct intel_guc *guc) argument
116 guc_ads_capture_offset(struct intel_guc *guc) argument
126 guc_ads_private_data_offset(struct intel_guc *guc) argument
136 guc_ads_blob_size(struct intel_guc *guc) argument
142 guc_policies_init(struct intel_guc *guc) argument
160 intel_guc_ads_print_policy_info(struct intel_guc *guc, struct drm_printer *dp) argument
175 guc_action_policies_update(struct intel_guc *guc, u32 policy_offset) argument
185 intel_guc_global_policies_update(struct intel_guc *guc) argument
414 guc_mmio_reg_state_create(struct intel_guc *guc) argument
446 guc_mmio_reg_state_init(struct intel_guc *guc) argument
510 guc_prep_golden_context(struct intel_guc *guc) argument
599 guc_init_golden_context(struct intel_guc *guc) argument
685 guc_capture_prep_lists(struct intel_guc *guc) argument
799 __guc_ads_init(struct intel_guc *guc) argument
860 intel_guc_ads_create(struct intel_guc *guc) argument
907 intel_guc_ads_init_late(struct intel_guc *guc) argument
919 intel_guc_ads_destroy(struct intel_guc *guc) argument
926 guc_ads_private_data_reset(struct intel_guc *guc) argument
946 intel_guc_ads_reset(struct intel_guc *guc) argument
956 intel_guc_engine_usage_offset(struct intel_guc *guc) argument
964 struct intel_guc *guc = &engine->gt->uc.guc; local
[all...]
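
Note: the intel_guc_ads.c helpers size the sections of the ADS blob (register set, golden contexts, capture lists, firmware private data), page-align most of them, and place each section after the previous one. The sketch below only illustrates that cumulative-offset idea; the real layout and alignment rules live in __guc_ads_init() and the *_offset() helpers, and the section sizes used here are invented.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE      4096u
#define PAGE_ALIGN(x)  (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

struct ads_layout {
	uint32_t regset_offset, golden_ctxt_offset;
	uint32_t capture_offset, private_data_offset;
	uint32_t total_size;
};

/* Each section starts where the previous page-aligned section ends. */
static void ads_layout_compute(struct ads_layout *l, uint32_t header_size,
			       uint32_t regset, uint32_t golden,
			       uint32_t capture, uint32_t private_data)
{
	uint32_t offset = PAGE_ALIGN(header_size);

	l->regset_offset = offset;
	offset += PAGE_ALIGN(regset);

	l->golden_ctxt_offset = offset;
	offset += PAGE_ALIGN(golden);

	l->capture_offset = offset;
	offset += PAGE_ALIGN(capture);

	l->private_data_offset = offset;
	offset += PAGE_ALIGN(private_data);

	l->total_size = offset;
}

int main(void)
{
	struct ads_layout l;

	/* Made-up section sizes, just to show the arithmetic. */
	ads_layout_compute(&l, 2048, 5000, 70000, 12000, 8192);
	printf("regset@%u golden@%u capture@%u private@%u total=%u\n",
	       (unsigned)l.regset_offset, (unsigned)l.golden_ctxt_offset,
	       (unsigned)l.capture_offset, (unsigned)l.private_data_offset,
	       (unsigned)l.total_size);
	return 0;
}
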
intel_uc.c
90 gt_dbg(gt, "enable_guc=%d (guc:%s submission:%s huc:%s slpc:%s)\n",
123 intel_guc_init_early(&uc->guc);
137 intel_guc_init_late(&uc->guc);
154 intel_guc_init_send_regs(&uc->guc);
159 struct intel_guc *guc = &uc->guc; local
161 if (guc->log.vma && !uc->load_err_log)
162 uc->load_err_log = i915_gem_object_get(guc->log.vma->obj);
183 * communication channel with guc is turned off at this point, we can save the
186 static void guc_clear_mmio_msg(struct intel_guc *guc) argument
191 guc_get_mmio_msg(struct intel_guc *guc) argument
210 guc_handle_mmio_msg(struct intel_guc *guc) argument
223 guc_enable_communication(struct intel_guc *guc) argument
255 guc_disable_communication(struct intel_guc *guc) argument
320 struct intel_guc *guc = &uc->guc; local
354 struct intel_guc *guc = &uc->guc; local
460 struct intel_guc *guc = &uc->guc; local
589 struct intel_guc *guc = &uc->guc; local
608 struct intel_guc *guc = &uc->guc; local
629 struct intel_guc *guc = &uc->guc; local
638 struct intel_guc *guc = &uc->guc; local
653 struct intel_guc *guc = &uc->guc; local
662 struct intel_guc *guc = &uc->guc; local
683 struct intel_guc *guc = &uc->guc; local
714 struct intel_guc *guc = &uc->guc; local
[all...]
intel_guc_capture.h
26 void intel_guc_capture_process(struct intel_guc *guc);
27 int intel_guc_capture_getlist(struct intel_guc *guc, u32 owner, u32 type, u32 classid,
29 int intel_guc_capture_getlistsize(struct intel_guc *guc, u32 owner, u32 type, u32 classid,
31 int intel_guc_capture_getnullheader(struct intel_guc *guc, void **outptr, size_t *size);
32 void intel_guc_capture_destroy(struct intel_guc *guc);
33 int intel_guc_capture_init(struct intel_guc *guc);
intel_guc_hwconfig.c
34 static int __guc_action_get_hwconfig(struct intel_guc *guc, argument
45 ret = intel_guc_send_mmio(guc, action, ARRAY_SIZE(action), NULL, 0);
52 static int guc_hwconfig_discover_size(struct intel_guc *guc, struct intel_hwconfig *hwconfig) argument
60 ret = __guc_action_get_hwconfig(guc, 0, 0);
71 static int guc_hwconfig_fill_buffer(struct intel_guc *guc, struct intel_hwconfig *hwconfig) argument
80 ret = intel_guc_allocate_and_map_vma(guc, hwconfig->size, &vma, &vaddr);
84 ggtt_offset = intel_guc_ggtt_offset(guc, vma);
86 ret = __guc_action_get_hwconfig(guc, ggtt_offset, hwconfig->size);
114 struct intel_guc *guc = &gt->uc.guc; local
[all...]
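
Note: guc_hwconfig_discover_size() and guc_hwconfig_fill_buffer() above follow a two-step query: first issue the firmware action with a zero size so the reply carries the payload size, then allocate a buffer of that size and issue the action again to have it filled. Below is a generic user-space sketch of that protocol; the query_fn callback and the fake firmware are illustrative, not the driver's API.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* A query returns the payload size; with buf == NULL it only reports the
 * size, mirroring the "call with size 0, then call again" pattern above. */
typedef int (*query_fn)(void *ctx, void *buf, size_t buf_size);

static int query_two_step(query_fn query, void *ctx, void **out, size_t *out_size)
{
	int size = query(ctx, NULL, 0);      /* step 1: discover size */
	if (size <= 0)
		return size ? size : -1;

	void *buf = malloc(size);
	if (!buf)
		return -1;

	int ret = query(ctx, buf, size);     /* step 2: fetch payload */
	if (ret < 0) {
		free(buf);
		return ret;
	}

	*out = buf;
	*out_size = (size_t)size;
	return 0;
}

/* Fake "firmware" serving a fixed blob, standing in for the GuC action. */
static int fake_hwconfig_query(void *ctx, void *buf, size_t buf_size)
{
	const char *blob = ctx;
	size_t len = strlen(blob) + 1;

	if (!buf)
		return (int)len;
	if (buf_size < len)
		return -1;
	memcpy(buf, blob, len);
	return (int)len;
}

int main(void)
{
	void *data;
	size_t size;

	if (!query_two_step(fake_hwconfig_query, (void *)"hwconfig-table", &data, &size)) {
		printf("got %zu bytes: %s\n", size, (char *)data);
		free(data);
	}
	return 0;
}
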
/linux-master/drivers/gpu/drm/xe/
xe_guc.h
16 void xe_guc_comm_init_early(struct xe_guc *guc);
17 int xe_guc_init(struct xe_guc *guc);
18 int xe_guc_init_post_hwconfig(struct xe_guc *guc);
19 int xe_guc_post_load_init(struct xe_guc *guc);
20 int xe_guc_reset(struct xe_guc *guc);
21 int xe_guc_upload(struct xe_guc *guc);
22 int xe_guc_min_load_for_hwconfig(struct xe_guc *guc);
23 int xe_guc_enable_communication(struct xe_guc *guc);
24 int xe_guc_suspend(struct xe_guc *guc);
25 void xe_guc_notify(struct xe_guc *guc);
63 guc_to_gt(struct xe_guc *guc) argument
68 guc_to_xe(struct xe_guc *guc) argument
[all...]
xe_guc_hwconfig.h
13 int xe_guc_hwconfig_init(struct xe_guc *guc);
14 u32 xe_guc_hwconfig_size(struct xe_guc *guc);
15 void xe_guc_hwconfig_copy(struct xe_guc *guc, void *dst);
xe_guc_hwconfig.c
17 static int send_get_hwconfig(struct xe_guc *guc, u32 ggtt_addr, u32 size) argument
26 return xe_guc_mmio_send(guc, action, ARRAY_SIZE(action));
29 static int guc_hwconfig_size(struct xe_guc *guc, u32 *size) argument
31 int ret = send_get_hwconfig(guc, 0, 0);
40 static int guc_hwconfig_copy(struct xe_guc *guc) argument
42 int ret = send_get_hwconfig(guc, xe_bo_ggtt_addr(guc->hwconfig.bo),
43 guc->hwconfig.size);
51 int xe_guc_hwconfig_init(struct xe_guc *guc) argument
53 struct xe_device *xe = guc_to_xe(guc);
91 xe_guc_hwconfig_size(struct xe_guc *guc) argument
96 xe_guc_hwconfig_copy(struct xe_guc *guc, void *dst) argument
[all...]
xe_guc_submit.h
15 int xe_guc_submit_init(struct xe_guc *guc);
17 int xe_guc_submit_reset_prepare(struct xe_guc *guc);
18 void xe_guc_submit_reset_wait(struct xe_guc *guc);
19 int xe_guc_submit_stop(struct xe_guc *guc);
20 int xe_guc_submit_start(struct xe_guc *guc);
22 int xe_guc_sched_done_handler(struct xe_guc *guc, u32 *msg, u32 len);
23 int xe_guc_deregister_done_handler(struct xe_guc *guc, u32 *msg, u32 len);
24 int xe_guc_exec_queue_reset_handler(struct xe_guc *guc, u32 *msg, u32 len);
25 int xe_guc_exec_queue_memory_cat_error_handler(struct xe_guc *guc, u32 *msg,
27 int xe_guc_exec_queue_reset_failure_handler(struct xe_guc *guc, u3
[all...]
xe_guc_debugfs.h
12 void xe_guc_debugfs_register(struct xe_guc *guc, struct dentry *parent);
xe_guc.c
38 static u32 guc_bo_ggtt_addr(struct xe_guc *guc, argument
41 struct xe_device *xe = guc_to_xe(guc);
44 xe_assert(xe, addr >= xe_wopcm_size(guc_to_xe(guc)));
51 static u32 guc_ctl_debug_flags(struct xe_guc *guc) argument
53 u32 level = xe_guc_log_get_level(&guc->log);
65 static u32 guc_ctl_feature_flags(struct xe_guc *guc) argument
69 if (!guc_to_xe(guc)->info.skip_guc_pc)
75 static u32 guc_ctl_log_params_flags(struct xe_guc *guc) argument
77 u32 offset = guc_bo_ggtt_addr(guc, guc
128 guc_ctl_ads_flags(struct xe_guc *guc) argument
138 guc_ctl_wa_flags(struct xe_guc *guc) argument
185 guc_ctl_devid(struct xe_guc *guc) argument
192 guc_init_params(struct xe_guc *guc) argument
212 guc_init_params_post_hwconfig(struct xe_guc *guc) argument
237 guc_write_params(struct xe_guc *guc) argument
252 struct xe_guc *guc = arg; local
265 xe_guc_comm_init_early(struct xe_guc *guc) argument
275 xe_guc_realloc_post_hwconfig(struct xe_guc *guc) argument
303 xe_guc_init(struct xe_guc *guc) argument
356 xe_guc_init_post_hwconfig(struct xe_guc *guc) argument
373 xe_guc_post_load_init(struct xe_guc *guc) argument
381 xe_guc_reset(struct xe_guc *guc) argument
415 guc_prepare_xfer(struct xe_guc *guc) argument
440 guc_xfer_rsa(struct xe_guc *guc) argument
464 guc_wait_ucode(struct xe_guc *guc) argument
521 __xe_guc_upload(struct xe_guc *guc) argument
571 xe_guc_min_load_for_hwconfig(struct xe_guc *guc) argument
595 xe_guc_upload(struct xe_guc *guc) argument
602 guc_handle_mmio_msg(struct xe_guc *guc) argument
623 guc_enable_irq(struct xe_guc *guc) argument
641 xe_guc_enable_communication(struct xe_guc *guc) argument
669 xe_guc_suspend(struct xe_guc *guc) argument
687 xe_guc_notify(struct xe_guc *guc) argument
700 xe_guc_auth_huc(struct xe_guc *guc, u32 rsa_addr) argument
710 xe_guc_mmio_send_recv(struct xe_guc *guc, const u32 *request, u32 len, u32 *response_buf) argument
830 xe_guc_mmio_send(struct xe_guc *guc, const u32 *request, u32 len) argument
835 guc_self_cfg(struct xe_guc *guc, u16 key, u16 len, u64 val) argument
868 xe_guc_self_cfg32(struct xe_guc *guc, u16 key, u32 val) argument
873 xe_guc_self_cfg64(struct xe_guc *guc, u16 key, u64 val) argument
878 xe_guc_irq_handler(struct xe_guc *guc, const u16 iir) argument
884 xe_guc_sanitize(struct xe_guc *guc) argument
891 xe_guc_reset_prepare(struct xe_guc *guc) argument
896 xe_guc_reset_wait(struct xe_guc *guc) argument
901 xe_guc_stop_prepare(struct xe_guc *guc) argument
906 xe_guc_stop(struct xe_guc *guc) argument
919 xe_guc_start(struct xe_guc *guc) argument
929 xe_guc_print_info(struct xe_guc *guc, struct drm_printer *p) argument
977 xe_guc_in_reset(struct xe_guc *guc) argument
[all...]
xe_guc_submit.c
45 return &q->gt->uc.guc;
64 return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_REGISTERED;
69 atomic_or(EXEC_QUEUE_STATE_REGISTERED, &q->guc->state);
74 atomic_and(~EXEC_QUEUE_STATE_REGISTERED, &q->guc->state);
79 return atomic_read(&q->guc->state) & ENGINE_STATE_ENABLED;
84 atomic_or(ENGINE_STATE_ENABLED, &q->guc->state);
89 atomic_and(~ENGINE_STATE_ENABLED, &q->guc->state);
94 return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_PENDING_ENABLE;
99 atomic_or(EXEC_QUEUE_STATE_PENDING_ENABLE, &q->guc->state);
104 atomic_and(~EXEC_QUEUE_STATE_PENDING_ENABLE, &q->guc
183 alloc_submit_wq(struct xe_guc *guc) argument
203 free_submit_wq(struct xe_guc *guc) argument
211 get_submit_wq(struct xe_guc *guc) argument
218 alloc_submit_wq(struct xe_guc *guc) argument
223 free_submit_wq(struct xe_guc *guc) argument
228 get_submit_wq(struct xe_guc *guc) argument
236 struct xe_guc *guc = arg; local
252 primelockdep(struct xe_guc *guc) argument
266 xe_guc_submit_init(struct xe_guc *guc) argument
301 __release_guc_id(struct xe_guc *guc, struct xe_exec_queue *q, u32 xa_count) argument
318 alloc_guc_id(struct xe_guc *guc, struct xe_exec_queue *q) argument
366 release_guc_id(struct xe_guc *guc, struct xe_exec_queue *q) argument
421 init_policies(struct xe_guc *guc, struct xe_exec_queue *q) argument
440 set_min_preemption_timeout(struct xe_guc *guc, struct xe_exec_queue *q) argument
458 __register_mlrc_engine(struct xe_guc *guc, struct xe_exec_queue *q, struct guc_ctxt_registration_info *info) argument
497 __register_engine(struct xe_guc *guc, struct guc_ctxt_registration_info *info) argument
520 struct xe_guc *guc = exec_queue_to_guc(q); local
579 struct xe_guc *guc = exec_queue_to_guc(q); local
607 struct xe_guc *guc = exec_queue_to_guc(q); local
627 struct xe_guc *guc = exec_queue_to_guc(q); local
672 struct xe_guc *guc = exec_queue_to_guc(q); local
727 struct xe_guc *guc = exec_queue_to_guc(q); local
762 guc_read_stopped(struct xe_guc *guc) argument
774 disable_scheduling_deregister(struct xe_guc *guc, struct xe_exec_queue *q) argument
814 struct xe_guc *guc = exec_queue_to_guc(q); local
858 struct xe_guc *guc = exec_queue_to_guc(q); local
875 struct xe_guc *guc = exec_queue_to_guc(q); local
897 struct xe_guc *guc = exec_queue_to_guc(q); local
967 struct xe_guc *guc = exec_queue_to_guc(q); local
1029 struct xe_guc *guc = exec_queue_to_guc(q); local
1054 __guc_exec_queue_fini(struct xe_guc *guc, struct xe_exec_queue *q) argument
1069 struct xe_guc *guc = exec_queue_to_guc(q); local
1089 struct xe_guc *guc = exec_queue_to_guc(q); local
1098 struct xe_guc *guc = exec_queue_to_guc(q); local
1113 struct xe_guc *guc = exec_queue_to_guc(q); local
1148 struct xe_guc *guc = exec_queue_to_guc(q); local
1206 struct xe_guc *guc = exec_queue_to_guc(q); local
1370 struct xe_guc *guc = exec_queue_to_guc(q); local
1379 struct xe_guc *guc = exec_queue_to_guc(q); local
1411 guc_exec_queue_stop(struct xe_guc *guc, struct xe_exec_queue *q) argument
1455 xe_guc_submit_reset_prepare(struct xe_guc *guc) argument
1473 xe_guc_submit_reset_wait(struct xe_guc *guc) argument
1478 xe_guc_submit_stop(struct xe_guc *guc) argument
1517 xe_guc_submit_start(struct xe_guc *guc) argument
1537 g2h_exec_queue_lookup(struct xe_guc *guc, u32 guc_id) argument
1559 deregister_exec_queue(struct xe_guc *guc, struct xe_exec_queue *q) argument
1571 xe_guc_sched_done_handler(struct xe_guc *guc, u32 *msg, u32 len) argument
1616 xe_guc_deregister_done_handler(struct xe_guc *guc, u32 *msg, u32 len) argument
1650 xe_guc_exec_queue_reset_handler(struct xe_guc *guc, u32 *msg, u32 len) argument
1684 xe_guc_exec_queue_memory_cat_error_handler(struct xe_guc *guc, u32 *msg, u32 len) argument
1711 xe_guc_exec_queue_reset_failure_handler(struct xe_guc *guc, u32 *msg, u32 len) argument
1739 struct xe_guc *guc = exec_queue_to_guc(q); local
1975 xe_guc_submit_print(struct xe_guc *guc, struct drm_printer *p) argument
[all...]
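
Note: xe_guc_submit.c keeps per-exec-queue lifecycle flags (registered, enabled, pending enable, ...) in one atomic word: atomic_or() sets a flag, atomic_and() with the complement clears it, and atomic_read() plus a mask tests it. Below is a user-space sketch of the same flag scheme using C11 atomics; the flag names echo the snippets, but the bit values are made up.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Illustrative flag values; the driver defines its own set. */
#define STATE_REGISTERED      (1u << 0)
#define STATE_ENABLED         (1u << 1)
#define STATE_PENDING_ENABLE  (1u << 2)

struct exec_queue {
	atomic_uint state;
};

static bool queue_registered(struct exec_queue *q)
{
	return atomic_load(&q->state) & STATE_REGISTERED;
}

static void set_queue_registered(struct exec_queue *q)
{
	atomic_fetch_or(&q->state, STATE_REGISTERED);   /* set the flag   */
}

static void clear_queue_registered(struct exec_queue *q)
{
	atomic_fetch_and(&q->state, ~STATE_REGISTERED); /* clear the flag */
}

int main(void)
{
	struct exec_queue q = { .state = 0 };

	set_queue_registered(&q);
	printf("registered=%d\n", queue_registered(&q));   /* prints 1 */
	clear_queue_registered(&q);
	printf("registered=%d\n", queue_registered(&q));   /* prints 0 */
	return 0;
}
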
xe_guc_debugfs.c
25 struct xe_guc *guc = node_to_guc(m->private); local
26 struct xe_device *xe = guc_to_xe(guc);
30 xe_guc_print_info(guc, &p);
38 struct xe_guc *guc = node_to_guc(m->private); local
39 struct xe_device *xe = guc_to_xe(guc);
43 xe_guc_log_print(&guc->log, &p);
54 void xe_guc_debugfs_register(struct xe_guc *guc, struct dentry *parent) argument
56 struct drm_minor *minor = guc_to_xe(guc)->drm.primary;
61 local = drmm_kmalloc(&guc_to_xe(guc)->drm, DEBUGFS_SIZE, GFP_KERNEL);
69 local[i].data = guc;
[all...]
xe_uc_types.h
18 /** @guc: Graphics micro controller */
19 struct xe_guc guc; member in struct:xe_uc
xe_wopcm_types.h
17 /** @guc: GuC WOPCM Region info */
19 /** @guc.base: GuC WOPCM base which is offset from WOPCM base */
21 /** @guc.size: Size of the GuC WOPCM region */
23 } guc; member in struct:xe_wopcm
xe_gt_pagefault.h
16 int xe_guc_pagefault_handler(struct xe_guc *guc, u32 *msg, u32 len);
17 int xe_guc_access_counter_notify_handler(struct xe_guc *guc, u32 *msg, u32 len);
/linux-master/drivers/gpu/drm/i915/gt/
intel_wopcm.h
15 * @guc: GuC WOPCM Region info.
16 * @guc.base: GuC WOPCM base which is offset from WOPCM base.
17 * @guc.size: Size of the GuC WOPCM region.
24 } guc; member in struct:intel_wopcm
39 return wopcm->guc.base;
54 return wopcm->guc.size;
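
Note: both drivers describe the GuC's slice of WOPCM with a base (an offset from the start of WOPCM) and a size, and the accessors above simply return those two fields. Below is a small sketch of such a partition description, plus an illustrative bounds check that is not taken from the driver.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct wopcm {
	uint32_t size;         /* total WOPCM size                   */
	struct {
		uint32_t base; /* GuC region offset from WOPCM base  */
		uint32_t size; /* GuC region size                    */
	} guc;
};

static uint32_t wopcm_guc_base(const struct wopcm *w) { return w->guc.base; }
static uint32_t wopcm_guc_size(const struct wopcm *w) { return w->guc.size; }

/* Illustrative sanity check: the GuC region must lie entirely inside WOPCM. */
static bool wopcm_guc_region_valid(const struct wopcm *w)
{
	return w->guc.base <= w->size &&
	       w->guc.size <= w->size - w->guc.base;
}

int main(void)
{
	/* Made-up sizes, only to exercise the helpers. */
	struct wopcm w = { .size = 2u << 20, .guc = { .base = 0x2000, .size = 1u << 20 } };

	printf("guc base=0x%x size=0x%x valid=%d\n",
	       (unsigned)wopcm_guc_base(&w), (unsigned)wopcm_guc_size(&w),
	       wopcm_guc_region_valid(&w));
	return 0;
}
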

Completed in 402 milliseconds
