Searched refs:hwe (Results 1 - 25 of 38) sorted by relevance

/linux-master/drivers/gpu/drm/xe/
xe_tuning.h
13 void xe_tuning_process_engine(struct xe_hw_engine *hwe);
14 void xe_tuning_process_lrc(struct xe_hw_engine *hwe);
xe_hw_engine.h
51 void xe_hw_engine_handle_irq(struct xe_hw_engine *hwe, u16 intr_vec);
52 void xe_hw_engine_enable_ring(struct xe_hw_engine *hwe);
57 xe_hw_engine_snapshot_capture(struct xe_hw_engine *hwe);
61 void xe_hw_engine_print(struct xe_hw_engine *hwe, struct drm_printer *p);
62 void xe_hw_engine_setup_default_lrc_state(struct xe_hw_engine *hwe);
64 bool xe_hw_engine_is_reserved(struct xe_hw_engine *hwe);
65 static inline bool xe_hw_engine_is_valid(struct xe_hw_engine *hwe) argument
67 return hwe->name;
xe_hw_engine.c
264 struct xe_hw_engine *hwe = arg; local
266 if (hwe->exl_port)
267 xe_execlist_port_destroy(hwe->exl_port);
268 xe_lrc_finish(&hwe->kernel_lrc);
270 hwe->gt = NULL;
273 static void hw_engine_mmio_write32(struct xe_hw_engine *hwe, struct xe_reg reg, argument
276 xe_gt_assert(hwe->gt, !(reg.addr & hwe->mmio_base));
277 xe_force_wake_assert_held(gt_to_fw(hwe->gt), hwe
284 hw_engine_mmio_read32(struct xe_hw_engine *hwe, struct xe_reg reg) argument
294 xe_hw_engine_enable_ring(struct xe_hw_engine *hwe) argument
313 xe_hw_engine_match_fixed_cslice_mode(const struct xe_gt *gt, const struct xe_hw_engine *hwe) argument
320 xe_rtp_cfeg_wmtp_disabled(const struct xe_gt *gt, const struct xe_hw_engine *hwe) argument
334 xe_hw_engine_setup_default_lrc_state(struct xe_hw_engine *hwe) argument
378 hw_engine_setup_default_state(struct xe_hw_engine *hwe) argument
430 hw_engine_init_early(struct xe_gt *gt, struct xe_hw_engine *hwe, enum xe_hw_engine_id id) argument
479 hw_engine_init(struct xe_gt *gt, struct xe_hw_engine *hwe, enum xe_hw_engine_id id) argument
541 struct xe_hw_engine *hwe; local
714 struct xe_hw_engine *hwe; local
728 xe_hw_engine_handle_irq(struct xe_hw_engine *hwe, u16 intr_vec) argument
750 xe_hw_engine_snapshot_capture(struct xe_hw_engine *hwe) argument
888 xe_hw_engine_print(struct xe_hw_engine *hwe, struct drm_printer *p) argument
911 xe_hw_engine_is_reserved(struct xe_hw_engine *hwe) argument
[all...]
xe_wa.h
17 void xe_wa_process_engine(struct xe_hw_engine *hwe);
18 void xe_wa_process_lrc(struct xe_hw_engine *hwe);
21 void xe_reg_whitelist_process_engine(struct xe_hw_engine *hwe);
xe_rtp.c
33 struct xe_hw_engine *hwe,
85 if (drm_WARN_ON(&xe->drm, !hwe))
88 match = hwe->class == r->engine_class;
91 if (drm_WARN_ON(&xe->drm, !hwe))
94 match = hwe->class != r->engine_class;
97 match = r->match_func(gt, hwe);
130 struct xe_hw_engine *hwe, struct xe_reg_sr *sr)
136 if (!rule_matches(xe, gt, hwe, entry->rules, entry->n_rules))
142 mmio_base = hwe->mmio_base;
153 struct xe_hw_engine **hwe,
31 rule_matches(const struct xe_device *xe, struct xe_gt *gt, struct xe_hw_engine *hwe, const struct xe_rtp_rule *rules, unsigned int n_rules) argument
128 rtp_process_one_sr(const struct xe_rtp_entry_sr *entry, struct xe_device *xe, struct xe_gt *gt, struct xe_hw_engine *hwe, struct xe_reg_sr *sr) argument
152 rtp_get_context(struct xe_rtp_process_ctx *ctx, struct xe_hw_engine **hwe, struct xe_gt **gt, struct xe_device **xe) argument
224 struct xe_hw_engine *hwe = NULL; local
268 struct xe_hw_engine *hwe; local
296 xe_rtp_match_even_instance(const struct xe_gt *gt, const struct xe_hw_engine *hwe) argument
302 xe_rtp_match_first_render_or_compute(const struct xe_gt *gt, const struct xe_hw_engine *hwe) argument
312 xe_rtp_match_first_gslice_fused_off(const struct xe_gt *gt, const struct xe_hw_engine *hwe) argument
[all...]
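
The xe_rtp.c hits above show the driver's register table processing (RTP) pattern: rule_matches() checks each entry's rules against the device, GT and engine, and rtp_process_one_sr() saves matching registers into a per-engine save/restore list. The following is a minimal, self-contained sketch of that table-walk-with-rule-matching idea, not the real implementation; every toy_* name and value is hypothetical and only the overall shape follows the snippets above.

    /* Simplified analogue of rule_matches()/rtp_process_one_sr(); toy_* names are made up. */
    #include <stdbool.h>
    #include <stdio.h>

    enum toy_engine_class { TOY_CLASS_RENDER, TOY_CLASS_COPY, TOY_CLASS_COMPUTE };

    struct toy_hw_engine {
            enum toy_engine_class class;
            unsigned int mmio_base;
    };

    struct toy_rule {
            enum toy_engine_class engine_class;     /* entry applies to this class only */
    };

    struct toy_entry {
            const char *name;
            unsigned int reg_offset;                /* relative to the engine's mmio_base */
            const struct toy_rule *rules;
            unsigned int n_rules;
    };

    /* Every rule of an entry must match, mirroring rule_matches() above. */
    static bool toy_rules_match(const struct toy_hw_engine *hwe,
                                const struct toy_rule *rules, unsigned int n_rules)
    {
            for (unsigned int i = 0; i < n_rules; i++)
                    if (hwe->class != rules[i].engine_class)
                            return false;
            return true;
    }

    /* Walk the table and record matching registers, like rtp_process_one_sr(). */
    static void toy_process_to_sr(const struct toy_hw_engine *hwe,
                                  const struct toy_entry *table, unsigned int n)
    {
            for (unsigned int i = 0; i < n; i++) {
                    if (!toy_rules_match(hwe, table[i].rules, table[i].n_rules))
                            continue;
                    printf("save %s @ 0x%x\n", table[i].name,
                           hwe->mmio_base + table[i].reg_offset);
            }
    }

    int main(void)
    {
            static const struct toy_rule render_only[] = { { TOY_CLASS_RENDER } };
            static const struct toy_entry table[] = {
                    { "example tuning", 0xd8, render_only, 1 },
            };
            struct toy_hw_engine rcs = { TOY_CLASS_RENDER, 0x2000 };

            toy_process_to_sr(&rcs, table, 1);
            return 0;
    }
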
xe_execlist.c
44 static void __start_lrc(struct xe_hw_engine *hwe, struct xe_lrc *lrc, argument
47 struct xe_gt *gt = hwe->gt;
54 xe_gt_assert(hwe->gt, FIELD_FIT(XEHP_SW_CTX_ID, ctx_id));
57 xe_gt_assert(hwe->gt, FIELD_FIT(SW_CTX_ID, ctx_id));
61 if (hwe->class == XE_ENGINE_CLASS_COMPUTE)
62 xe_mmio_write32(hwe->gt, RCU_MODE,
80 xe_mmio_write32(gt, RING_HWS_PGA(hwe->mmio_base),
81 xe_bo_ggtt_addr(hwe->hwsp));
82 xe_mmio_read32(gt, RING_HWS_PGA(hwe->mmio_base));
83 xe_mmio_write32(gt, RING_MODE(hwe
167 read_execlist_status(struct xe_hw_engine *hwe) argument
191 xe_execlist_port_irq_handler(struct xe_hw_engine *hwe, u16 intr_vec) argument
253 xe_execlist_port_create(struct xe_device *xe, struct xe_hw_engine *hwe) argument
[all...]
xe_gt.c
150 struct xe_reg_sr *sr = &q->hwe->reg_lrc;
159 if (q->hwe->class == XE_ENGINE_CLASS_RENDER)
161 bb = xe_bb_new(gt, xe_lrc_size(gt_to_xe(gt), q->hwe->class), false);
228 struct xe_hw_engine *hwe; local
232 for_each_hw_engine(hwe, gt, id) {
236 if (gt->default_lrc[hwe->class])
239 xe_reg_sr_init(&hwe->reg_lrc, hwe->name, xe);
240 xe_wa_process_lrc(hwe);
241 xe_hw_engine_setup_default_lrc_state(hwe);
590 struct xe_hw_engine *hwe; local
786 struct xe_hw_engine *hwe; local
801 struct xe_hw_engine *hwe; local
[all...]
xe_execlist.h
18 struct xe_hw_engine *hwe);
xe_reg_sr.h
26 void xe_reg_sr_apply_whitelist(struct xe_hw_engine *hwe);
xe_reg_whitelist.h
16 void xe_reg_whitelist_process_engine(struct xe_hw_engine *hwe);
xe_tuning.c
107 void xe_tuning_process_engine(struct xe_hw_engine *hwe) argument
109 struct xe_rtp_process_ctx ctx = XE_RTP_PROCESS_CTX_INITIALIZER(hwe);
111 xe_rtp_process_to_sr(&ctx, engine_tunings, &hwe->reg_sr);
117 * @hwe: engine instance to process tunings for
119 * Process LRC table for this platform, saving in @hwe all the tunings that need
123 void xe_tuning_process_lrc(struct xe_hw_engine *hwe) argument
125 struct xe_rtp_process_ctx ctx = XE_RTP_PROCESS_CTX_INITIALIZER(hwe);
127 xe_rtp_process_to_sr(&ctx, lrc_tunings, &hwe->reg_lrc);
xe_devcoredump_types.h
41 /** @hwe: HW Engine snapshot array */
42 struct xe_hw_engine_snapshot *hwe[XE_NUM_HW_ENGINES]; member in struct:xe_devcoredump_snapshot
xe_gt.h
66 static inline bool xe_gt_is_usm_hwe(struct xe_gt *gt, struct xe_hw_engine *hwe) argument
70 return xe->info.has_usm && hwe->class == XE_ENGINE_CLASS_COPY &&
71 hwe->instance == gt->usm.reserved_bcs_instance;
xe_devcoredump.c
117 if (coredump->snapshot.hwe[i])
118 xe_hw_engine_snapshot_print(coredump->snapshot.hwe[i],
143 if (coredump->snapshot.hwe[i])
144 xe_hw_engine_snapshot_free(coredump->snapshot.hwe[i]);
160 struct xe_hw_engine *hwe; local
190 for_each_hw_engine(hwe, q->gt, id) {
191 if (hwe->class != q->hwe->class ||
192 !(BIT(hwe->logical_instance) & adj_logical_mask)) {
193 coredump->snapshot.hwe[i
[all...]
xe_reg_whitelist.c
20 const struct xe_hw_engine *hwe)
22 return hwe->class != XE_ENGINE_CLASS_RENDER;
72 * @hwe: engine instance to process whitelist for
74 Process whitelist table for this platform, saving in @hwe all the
78 void xe_reg_whitelist_process_engine(struct xe_hw_engine *hwe) argument
80 struct xe_rtp_process_ctx ctx = XE_RTP_PROCESS_CTX_INITIALIZER(hwe);
82 xe_rtp_process_to_sr(&ctx, register_whitelist, &hwe->reg_whitelist);
19 match_not_render(const struct xe_gt *gt, const struct xe_hw_engine *hwe) argument
xe_guc_ads.c
215 struct xe_hw_engine *hwe; local
219 for_each_hw_engine(hwe, gt, id)
220 xa_for_each(&hwe->reg_sr.xa, sr_idx, sr_entry)
233 struct xe_hw_engine *hwe; local
237 for_each_hw_engine(hwe, gt, id)
238 if (hwe->class == class)
239 mask |= BIT(hwe->instance);
381 struct xe_hw_engine *hwe; local
386 for_each_hw_engine(hwe, gt, id) {
389 guc_class = xe_engine_class_to_guc_class(hwe
426 guc_mmio_regset_write(struct xe_guc_ads *ads, struct iosys_map *regset_map, struct xe_hw_engine *hwe) argument
476 struct xe_hw_engine *hwe; local
[all...]
xe_gt_debugfs.c
36 struct xe_hw_engine *hwe; local
47 for_each_hw_engine(hwe, gt, id)
48 xe_hw_engine_print(hwe, &p);
110 struct xe_hw_engine *hwe; local
117 for_each_hw_engine(hwe, gt, id)
118 xe_reg_sr_dump(&hwe->reg_sr, &p);
122 for_each_hw_engine(hwe, gt, id)
123 xe_reg_sr_dump(&hwe->reg_lrc, &p);
127 for_each_hw_engine(hwe, gt, id)
128 xe_reg_whitelist_dump(&hwe
[all...]
xe_gt_ccs_mode.c
47 struct xe_hw_engine *hwe; local
50 for_each_hw_engine(hwe, gt, id) {
51 if (hwe->class != XE_ENGINE_CLASS_COMPUTE)
54 if (hwe->logical_instance >= num_engines)
57 config |= BIT(hwe->instance) << XE_HW_ENGINE_CCS0;
64 mode |= CCS_MODE_CSLICE(cslice, hwe->instance);
xe_execlist_types.h
19 struct xe_hw_engine *hwe; member in struct:xe_execlist_port
xe_exec_queue.c
39 u16 width, struct xe_hw_engine *hwe,
43 struct xe_gt *gt = hwe->gt;
55 q->hwe = hwe;
57 q->class = hwe->class;
60 q->fence_irq = &gt->fence_irq[hwe->class];
61 q->ring_ops = gt->ring_ops[hwe->class];
66 q->sched_props.timeslice_us = hwe->eclass->sched_props.timeslice_us;
68 hwe->eclass->sched_props.preempt_timeout_us;
70 hwe
36 __xe_exec_queue_alloc(struct xe_device *xe, struct xe_vm *vm, u32 logical_mask, u16 width, struct xe_hw_engine *hwe, u32 flags, u64 extensions) argument
141 xe_exec_queue_create(struct xe_device *xe, struct xe_vm *vm, u32 logical_mask, u16 width, struct xe_hw_engine *hwe, u32 flags, u64 extensions) argument
177 struct xe_hw_engine *hwe, *hwe0 = NULL; local
469 struct xe_hw_engine *hwe; local
512 struct xe_hw_engine *hwe; local
554 struct xe_hw_engine *hwe; local
[all...]
xe_rtp_types.h
87 const struct xe_hw_engine *hwe);
117 struct xe_hw_engine *hwe; member in union:xe_rtp_process_ctx::__anon896
xe_memirq.c
361 struct xe_hw_engine *hwe)
363 memirq_debug(memirq, "STATUS %s %*ph\n", hwe->name, 16, status->vaddr);
365 if (memirq_received(memirq, status, ilog2(GT_RENDER_USER_INTERRUPT), hwe->name))
366 xe_hw_engine_handle_irq(hwe, GT_RENDER_USER_INTERRUPT);
390 struct xe_hw_engine *hwe; local
407 for_each_hw_engine(hwe, gt, id) {
408 if (memirq_received(memirq, &memirq->source, hwe->irq_offset, "SRC")) {
410 hwe->irq_offset * SZ_16);
411 memirq_dispatch_engine(memirq, &map, hwe);
360 memirq_dispatch_engine(struct xe_memirq *memirq, struct iosys_map *status, struct xe_hw_engine *hwe) argument
xe_lrc.c
91 const struct xe_hw_engine *hwe)
101 const u32 base = hwe->mmio_base;
122 xe_gt_assert(hwe->gt, count);
526 static void set_context_control(u32 *regs, struct xe_hw_engine *hwe) argument
534 static void set_memory_based_intr(u32 *regs, struct xe_hw_engine *hwe) argument
536 struct xe_memirq *memirq = &gt_to_tile(hwe->gt)->sriov.vf.memirq;
537 struct xe_device *xe = gt_to_xe(hwe->gt);
555 static int lrc_ring_mi_mode(struct xe_hw_engine *hwe) argument
557 struct xe_device *xe = gt_to_xe(hwe->gt);
565 static void reset_stop_ring(u32 *regs, struct xe_hw_engine *hwe) argument
89 set_offsets(u32 *regs, const u8 *data, const struct xe_hw_engine *hwe) argument
676 empty_lrc_data(struct xe_hw_engine *hwe) argument
707 xe_lrc_init(struct xe_lrc *lrc, struct xe_hw_engine *hwe, struct xe_exec_queue *q, struct xe_vm *vm, u32 ring_size) argument
[all...]
xe_query.c
43 struct xe_hw_engine *hwe; local
50 for_each_hw_engine(hwe, gt, id) {
51 if (xe_hw_engine_is_reserved(hwe))
116 struct xe_hw_engine *hwe; local
145 hwe = xe_gt_hw_engine(gt, user_to_xe_engine_class[eci->engine_class],
147 if (!hwe)
154 RING_TIMESTAMP(hwe->mmio_base),
155 RING_TIMESTAMP_UDW(hwe->mmio_base),
188 struct xe_hw_engine *hwe; local
206 for_each_hw_engine(hwe, g
[all...]
/linux-master/drivers/net/ethernet/mediatek/
mtk_ppe.c
518 struct mtk_foe_entry *hwe = mtk_foe_get_entry(ppe, entry->hash); local
520 hwe->ib1 &= ~MTK_FOE_IB1_STATE;
521 hwe->ib1 |= FIELD_PREP(MTK_FOE_IB1_STATE, MTK_FOE_STATE_INVALID);
559 struct mtk_foe_entry *hwe; local
568 hwe = mtk_foe_get_entry(ppe, cur->hash);
569 ib1 = READ_ONCE(hwe->ib1);
583 entry->data.ib1 |= hwe->ib1 & ib1_ts_mask;
591 struct mtk_foe_entry *hwe; local
603 hwe = mtk_foe_get_entry(ppe, entry->hash);
604 memcpy(&foe, hwe, pp
622 struct mtk_foe_entry *hwe; local
703 struct mtk_foe_entry foe = {}, *hwe; local
742 struct mtk_foe_entry *hwe = mtk_foe_get_entry(ppe, hash); local
968 struct mtk_foe_entry *hwe; local
1085 struct mtk_foe_entry *hwe = mtk_foe_get_entry(ppe, i); local
[all...]
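
In the mtk_ppe.c hits, hwe is a hardware flow-offload entry (struct mtk_foe_entry), and its state lives in a bitfield of the ib1 word that is updated by clearing MTK_FOE_IB1_STATE and writing a new value with FIELD_PREP(). Below is a minimal sketch of that clear-then-prep update in plain C; the mask position, state encoding, and the toy_field_prep() helper are illustrative assumptions, not the real MediaTek layout or the kernel macro itself (the sketch also assumes a GCC/Clang __builtin_ctz).

    /* Illustrative only: the mask position and state encoding are made up. */
    #include <stdint.h>
    #include <stdio.h>

    #define TOY_IB1_STATE           (0x3u << 28)    /* hypothetical 2-bit state field */
    #define TOY_STATE_INVALID       0u

    /* Shift a value into place under a mask, in the spirit of the kernel's FIELD_PREP(). */
    static inline uint32_t toy_field_prep(uint32_t mask, uint32_t val)
    {
            return (val << __builtin_ctz(mask)) & mask;
    }

    int main(void)
    {
            uint32_t ib1 = 0xdeadbeef;

            ib1 &= ~TOY_IB1_STATE;  /* clear the old state bits */
            ib1 |= toy_field_prep(TOY_IB1_STATE, TOY_STATE_INVALID);
            printf("ib1 = 0x%08x\n", ib1);
            return 0;
    }
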

Completed in 273 milliseconds
