Searched refs:ce (Results 1 - 25 of 212) sorted by last modified time


/linux-master/drivers/gpu/drm/i915/gt/
intel_workarounds.c
3161 static int engine_wa_list_verify(struct intel_context *ce, argument
3176 vma = __vm_create_scratch_for_read(&ce->engine->gt->ggtt->vm,
3181 intel_engine_pm_get(ce->engine);
3186 err = intel_context_pin_ww(ce, &ww);
3195 rq = i915_request_create(ce);
3240 intel_context_unpin(ce);
3248 intel_engine_pm_put(ce->engine);
intel_gt.c
545 struct intel_context *ce; local
551 ce = intel_context_create(engine);
552 if (IS_ERR(ce)) {
553 err = PTR_ERR(ce);
557 err = intel_renderstate_init(&so, ce);
561 rq = i915_request_create(ce);
579 intel_renderstate_fini(&so, ce);
582 intel_context_put(ce);
629 struct intel_context *ce; local
636 ce
[all...]
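
The intel_gt.c hits above trace the usual i915 context lifecycle: intel_context_create() returns a referenced context (unwrapped with IS_ERR()/PTR_ERR()), i915_request_create() builds a request on it, and intel_context_put() drops the creation reference on every exit path. A minimal sketch of that shape, with the renderstate setup from lines 557-579 omitted; this illustrates the pattern, it is not the kernel function itself:

static int emit_on_engine(struct intel_engine_cs *engine)
{
        struct intel_context *ce;
        struct i915_request *rq;
        int err = 0;

        ce = intel_context_create(engine);      /* new context + reference */
        if (IS_ERR(ce))
                return PTR_ERR(ce);

        rq = i915_request_create(ce);           /* pins ce for the request */
        if (IS_ERR(rq)) {
                err = PTR_ERR(rq);
                goto out_put;
        }

        i915_request_add(rq);                   /* submit; rq holds its own refs */
out_put:
        intel_context_put(ce);                  /* drop the creation reference */
        return err;
}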
intel_engine_cs.c
1336 static int measure_breadcrumb_dw(struct intel_context *ce) argument
1338 struct intel_engine_cs *engine = ce->engine;
1350 frame->rq.context = ce;
1351 rcu_assign_pointer(frame->rq.timeline, ce->timeline);
1352 frame->rq.hwsp_seqno = ce->timeline->hwsp_seqno;
1362 mutex_lock(&ce->timeline->mutex);
1368 mutex_unlock(&ce->timeline->mutex);
1384 struct intel_context *ce; local
1387 ce = intel_context_create(engine);
1388 if (IS_ERR(ce))
1418 intel_engine_destroy_pinned_context(struct intel_context *ce) argument
1471 struct intel_context *ce, *bce = NULL; local
2543 intel_engine_get_hung_entity(struct intel_engine_cs *engine, struct intel_context **ce, struct i915_request **rq) argument
[all...]
intel_execlists_submission.c
434 struct intel_context * const ce = rq->context; local
459 head = __active_request(ce->timeline, rq, -EIO)->head;
460 head = intel_ring_wrap(ce->ring, head);
463 lrc_init_regs(ce, engine, true);
466 ce->lrc.lrca = lrc_update_regs(ce, engine, head);
478 struct intel_context * const ce = rq->context; local
480 intel_context_get(ce);
482 if (unlikely(intel_context_is_closed(ce) &&
484 intel_context_set_exiting(ce);
532 struct intel_context * const ce = rq->context; local
560 kick_siblings(struct i915_request *rq, struct intel_context *ce) argument
589 __execlists_schedule_out(struct i915_request * const rq, struct intel_context * const ce) argument
653 struct intel_context * const ce = rq->context; local
677 struct intel_context *ce = rq->context; local
785 struct intel_context *ce = NULL; local
944 ctx_single_port_submission(const struct intel_context *ce) argument
2606 __execlists_context_pre_pin(struct intel_context *ce, struct intel_engine_cs *engine, struct i915_gem_ww_ctx *ww, void **vaddr) argument
2625 execlists_context_pre_pin(struct intel_context *ce, struct i915_gem_ww_ctx *ww, void **vaddr) argument
2632 execlists_context_pin(struct intel_context *ce, void *vaddr) argument
2637 execlists_context_alloc(struct intel_context *ce) argument
2642 execlists_context_cancel_request(struct intel_context *ce, struct i915_request *rq) argument
2660 struct intel_context *parent = NULL, *ce, *err; local
3029 struct intel_context *ce; local
3712 virtual_context_alloc(struct intel_context *ce) argument
3719 virtual_context_pre_pin(struct intel_context *ce, struct i915_gem_ww_ctx *ww, void **vaddr) argument
3729 virtual_context_pin(struct intel_context *ce, void *vaddr) argument
3736 virtual_context_enter(struct intel_context *ce) argument
3747 virtual_context_exit(struct intel_context *ce) argument
[all...]
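
Most of the intel_execlists_submission.c hits are per-context callbacks (alloc, pre_pin, pin, cancel_request, plus virtual_* twins for virtual engines); i915 collects these into a struct intel_context_ops vtable so the same struct intel_context can be driven by either the execlists or the GuC backend. A hedged sketch of that function-pointer pattern (abbreviated; the real intel_context_ops has more hooks):

static const struct intel_context_ops execlists_context_ops = {
        .alloc          = execlists_context_alloc,              /* line 2637 */
        .pre_pin        = execlists_context_pre_pin,            /* line 2625 */
        .pin            = execlists_context_pin,                /* line 2632 */
        .cancel_request = execlists_context_cancel_request,     /* line 2642 */
        /* ... unpin/enter/exit/destroy hooks elided ... */
};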
intel_engine_pm.c
35 static void dbg_poison_ce(struct intel_context *ce) argument
40 if (ce->state) {
41 struct drm_i915_gem_object *obj = ce->state->obj;
42 int type = intel_gt_coherent_map_type(ce->engine->gt, obj, true);
62 struct intel_context *ce; local
69 ce = engine->kernel_context;
70 if (ce) {
71 GEM_BUG_ON(test_bit(CONTEXT_VALID_BIT, &ce->flags));
74 while (unlikely(intel_context_inflight(ce)))
78 dbg_poison_ce(ce);
153 struct intel_context *ce = engine->kernel_context; local
313 struct intel_context *ce; local
[all...]
/linux-master/drivers/gpu/drm/amd/amdgpu/
gfx_v10_0.c
4657 amdgpu_bo_free_kernel(&adev->gfx.ce.ce_fw_obj,
4658 &adev->gfx.ce.ce_fw_gpu_addr,
4659 (void **)&adev->gfx.ce.ce_fw_ptr);
5366 /* ce ucode */
5540 /* Program ce ucode address into instruction cache address register */
5795 &adev->gfx.ce.ce_fw_obj,
5796 &adev->gfx.ce.ce_fw_gpu_addr,
5797 (void **)&adev->gfx.ce.ce_fw_ptr);
5799 dev_err(adev->dev, "(%d) failed to create ce fw bo\n", r);
5804 memcpy(adev->gfx.ce
[all...]
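
The gfx_v10_0.c hits are the amdgpu "kernel BO" triple: amdgpu_bo_create_kernel() hands back a BO object, its GPU address, and a CPU mapping in one call; the firmware image is then memcpy'd through the CPU pointer, and amdgpu_bo_free_kernel() releases all three together. A hedged sketch of that create/upload/free cycle (fw_data and fw_size are placeholders for the real ucode blob, and the GTT domain is an assumption):

r = amdgpu_bo_create_kernel(adev, fw_size, PAGE_SIZE,
                            AMDGPU_GEM_DOMAIN_GTT,      /* assumed domain */
                            &adev->gfx.ce.ce_fw_obj,
                            &adev->gfx.ce.ce_fw_gpu_addr,
                            (void **)&adev->gfx.ce.ce_fw_ptr);
if (r) {
        dev_err(adev->dev, "(%d) failed to create ce fw bo\n", r);
        return r;
}
memcpy(adev->gfx.ce.ce_fw_ptr, fw_data, fw_size);       /* upload CE ucode */
/* ... later, on teardown ... */
amdgpu_bo_free_kernel(&adev->gfx.ce.ce_fw_obj,
                      &adev->gfx.ce.ce_fw_gpu_addr,
                      (void **)&adev->gfx.ce.ce_fw_ptr);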
/linux-master/drivers/iommu/intel/
iommu.c
2454 struct context_entry *new_ce = NULL, ce; local
2508 memcpy(&ce, old_ce + idx, sizeof(ce));
2510 if (!context_present(&ce))
2513 did = context_domain_id(&ce);
2518 new_ce[idx] = ce;
/linux-master/net/ipv6/netfilter/
ip6_tables.c
1239 struct compat_ip6t_entry __user *ce; local
1246 ce = *dstptr;
1247 if (copy_to_user(ce, e, sizeof(struct ip6t_entry)) != 0 ||
1248 copy_to_user(&ce->counters, &counters[i],
1266 if (put_user(target_offset, &ce->target_offset) != 0 ||
1267 put_user(next_offset, &ce->next_offset) != 0)
/linux-master/net/ipv4/netfilter/
ip_tables.c
1223 struct compat_ipt_entry __user *ce; local
1230 ce = *dstptr;
1231 if (copy_to_user(ce, e, sizeof(struct ipt_entry)) != 0 ||
1232 copy_to_user(&ce->counters, &counters[i],
1250 if (put_user(target_offset, &ce->target_offset) != 0 ||
1251 put_user(next_offset, &ce->next_offset) != 0)
arp_tables.c
1311 struct compat_arpt_entry __user *ce; local
1317 ce = *dstptr;
1318 if (copy_to_user(ce, e, sizeof(struct arpt_entry)) != 0 ||
1319 copy_to_user(&ce->counters, &counters[i],
1333 if (put_user(target_offset, &ce->target_offset) != 0 ||
1334 put_user(next_offset, &ce->next_offset) != 0)
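
The three netfilter files above (ip6_tables.c, ip_tables.c, arp_tables.c) repeat one compat translation pattern: copy the kernel entry and its counters into the 32-bit userspace layout with copy_to_user(), convert the matches/target in between, then patch the now-smaller target_offset/next_offset with put_user(). A hedged, generic sketch of that shape (error handling condensed; the ipt variant is shown):

struct compat_ipt_entry __user *ce = *dstptr;

if (copy_to_user(ce, e, sizeof(struct ipt_entry)) != 0 ||
    copy_to_user(&ce->counters, &counters[i], sizeof(counters[i])) != 0)
        return -EFAULT;

/* ... matches and target converted here, shrinking the entry ... */

if (put_user(target_offset, &ce->target_offset) != 0 ||
    put_user(next_offset, &ce->next_offset) != 0)
        return -EFAULT;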
/linux-master/drivers/gpu/drm/i915/gt/uc/
intel_guc_submission.c
112 * ce->guc_state.lock
113 * Protects everything under ce->guc_state. Ensures that a context is in the
121 * sched_engine->lock -> ce->guc_state.lock
122 * guc->submission_state.lock -> ce->guc_state.lock
181 static inline void init_sched_state(struct intel_context *ce) argument
183 lockdep_assert_held(&ce->guc_state.lock);
184 ce->guc_state.sched_state &= SCHED_STATE_BLOCKED_MASK;
198 static bool sched_state_is_init(struct intel_context *ce) argument
200 return !(ce->guc_state.sched_state & ~SCHED_STATE_VALID_INIT);
204 context_wait_for_deregister_to_register(struct intel_context *ce) argument
211 set_context_wait_for_deregister_to_register(struct intel_context *ce) argument
219 clr_context_wait_for_deregister_to_register(struct intel_context *ce) argument
227 context_destroyed(struct intel_context *ce) argument
233 set_context_destroyed(struct intel_context *ce) argument
240 clr_context_destroyed(struct intel_context *ce) argument
246 context_pending_disable(struct intel_context *ce) argument
251 set_context_pending_disable(struct intel_context *ce) argument
257 clr_context_pending_disable(struct intel_context *ce) argument
263 context_banned(struct intel_context *ce) argument
268 set_context_banned(struct intel_context *ce) argument
274 clr_context_banned(struct intel_context *ce) argument
280 context_enabled(struct intel_context *ce) argument
285 set_context_enabled(struct intel_context *ce) argument
291 clr_context_enabled(struct intel_context *ce) argument
297 context_pending_enable(struct intel_context *ce) argument
302 set_context_pending_enable(struct intel_context *ce) argument
308 clr_context_pending_enable(struct intel_context *ce) argument
314 context_registered(struct intel_context *ce) argument
319 set_context_registered(struct intel_context *ce) argument
325 clr_context_registered(struct intel_context *ce) argument
331 context_policy_required(struct intel_context *ce) argument
336 set_context_policy_required(struct intel_context *ce) argument
342 clr_context_policy_required(struct intel_context *ce) argument
348 context_close_done(struct intel_context *ce) argument
353 set_context_close_done(struct intel_context *ce) argument
359 context_blocked(struct intel_context *ce) argument
365 incr_context_blocked(struct intel_context *ce) argument
374 decr_context_blocked(struct intel_context *ce) argument
389 context_guc_id_invalid(struct intel_context *ce) argument
394 set_context_guc_id_invalid(struct intel_context *ce) argument
399 ce_to_guc(struct intel_context *ce) argument
449 __get_parent_scratch_offset(struct intel_context *ce) argument
456 __get_wq_offset(struct intel_context *ce) argument
464 __get_parent_scratch(struct intel_context *ce) argument
481 __get_process_desc_v69(struct intel_context *ce) argument
489 __get_wq_desc_v70(struct intel_context *ce) argument
496 get_wq_pointer(struct intel_context *ce, u32 wqi_size) argument
518 struct intel_context *ce = xa_load(&guc->context_lookup, id); local
579 set_ctx_id_mapping(struct intel_guc *guc, u32 id, struct intel_context *ce) argument
699 struct intel_context *ce = request_to_scheduling_context(rq); local
814 wq_space_until_wrap(struct intel_context *ce) argument
819 write_wqi(struct intel_context *ce, u32 wqi_size) argument
833 guc_wq_noop_append(struct intel_context *ce) argument
852 struct intel_context *ce = request_to_scheduling_context(rq); local
896 struct intel_context *ce = request_to_scheduling_context(rq); local
913 struct intel_context *ce = request_to_scheduling_context(rq); local
990 struct intel_context *ce = request_to_scheduling_context(last); local
1079 struct intel_context *ce; local
1460 __guc_context_update_stats(struct intel_context *ce) argument
1470 guc_context_update_stats(struct intel_context *ce) argument
1485 struct intel_context *ce; local
1713 __context_to_physical_engine(struct intel_context *ce) argument
1723 guc_reset_state(struct intel_context *ce, u32 head, bool scrub) argument
1768 __unwind_incomplete_requests(struct intel_context *ce) argument
1803 __guc_reset_context(struct intel_context *ce, intel_engine_mask_t stalled) argument
1875 struct intel_context *ce; local
1905 guc_cancel_context_requests(struct intel_context *ce) argument
1973 struct intel_context *ce; local
2182 struct intel_context *ce = request_to_scheduling_context(rq); local
2206 new_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2233 __release_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2255 release_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2264 steal_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2301 assign_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2330 pin_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2380 unpin_guc_id(struct intel_guc *guc, struct intel_context *ce) argument
2399 __guc_action_register_multi_lrc_v69(struct intel_guc *guc, struct intel_context *ce, u32 guc_id, u32 offset, bool loop) argument
2423 __guc_action_register_multi_lrc_v70(struct intel_guc *guc, struct intel_context *ce, struct guc_ctxt_registration_info *info, bool loop) argument
2509 register_context_v69(struct intel_guc *guc, struct intel_context *ce, bool loop) argument
2525 register_context_v70(struct intel_guc *guc, struct intel_context *ce, bool loop) argument
2537 register_context(struct intel_context *ce, bool loop) argument
2577 deregister_context(struct intel_context *ce, u32 guc_id) argument
2587 clear_children_join_go_memory(struct intel_context *ce) argument
2597 get_children_go_value(struct intel_context *ce) argument
2602 get_children_join_value(struct intel_context *ce, u8 child_index) argument
2655 guc_context_policy_init_v70(struct intel_context *ce, bool loop) argument
2731 prepare_context_registration_info_v69(struct intel_context *ce) argument
2799 prepare_context_registration_info_v70(struct intel_context *ce, struct guc_ctxt_registration_info *info) argument
2862 try_context_registration(struct intel_context *ce, bool loop) argument
2929 __guc_context_pre_pin(struct intel_context *ce, struct intel_engine_cs *engine, struct i915_gem_ww_ctx *ww, void **vaddr) argument
2937 __guc_context_pin(struct intel_context *ce, struct intel_engine_cs *engine, void *vaddr) argument
2953 guc_context_pre_pin(struct intel_context *ce, struct i915_gem_ww_ctx *ww, void **vaddr) argument
2960 guc_context_pin(struct intel_context *ce, void *vaddr) argument
2970 guc_context_unpin(struct intel_context *ce) argument
2982 guc_context_post_unpin(struct intel_context *ce) argument
2987 __guc_context_sched_enable(struct intel_guc *guc, struct intel_context *ce) argument
3002 __guc_context_sched_disable(struct intel_guc *guc, struct intel_context *ce, u16 guc_id) argument
3021 guc_blocked_fence_complete(struct intel_context *ce) argument
3029 guc_blocked_fence_reinit(struct intel_context *ce) argument
3045 prep_context_pending_disable(struct intel_context *ce) argument
3057 guc_context_block(struct intel_context *ce) argument
3103 context_cant_unblock(struct intel_context *ce) argument
3113 guc_context_unblock(struct intel_context *ce) argument
3146 guc_context_cancel_request(struct intel_context *ce, struct i915_request *rq) argument
3191 guc_context_revoke(struct intel_context *ce, struct i915_request *rq, unsigned int preempt_timeout_ms) argument
3262 bypass_sched_disable(struct intel_guc *guc, struct intel_context *ce) argument
3279 struct intel_context *ce = local
3294 guc_id_pressure(struct intel_guc *guc, struct intel_context *ce) argument
3310 guc_context_sched_disable(struct intel_context *ce) argument
3332 guc_context_close(struct intel_context *ce) argument
3345 guc_lrc_desc_unpin(struct intel_context *ce) argument
3399 __guc_context_destroy(struct intel_context *ce) argument
3424 struct intel_context *ce; local
3449 struct intel_context *ce; local
3507 struct intel_context *ce = container_of(kref, typeof(*ce), ref); local
3542 guc_context_alloc(struct intel_context *ce) argument
3547 __guc_context_set_prio(struct intel_guc *guc, struct intel_context *ce) argument
3567 guc_context_set_prio(struct intel_guc *guc, struct intel_context *ce, u8 prio) argument
3599 add_context_inflight_prio(struct intel_context *ce, u8 guc_prio) argument
3611 sub_context_inflight_prio(struct intel_context *ce, u8 guc_prio) argument
3623 update_context_prio(struct intel_context *ce) argument
3649 struct intel_context *ce = request_to_scheduling_context(rq); local
3671 guc_prio_fini(struct i915_request *rq, struct intel_context *ce) argument
3685 struct intel_context *ce = request_to_scheduling_context(rq); local
3742 __guc_signal_context_fence(struct intel_context *ce) argument
3764 guc_signal_context_fence(struct intel_context *ce) argument
3776 context_needs_register(struct intel_context *ce, bool new_guc_id) argument
3783 guc_context_init(struct intel_context *ce) argument
3804 struct intel_context *ce = request_to_scheduling_context(rq); local
3914 guc_virtual_context_pre_pin(struct intel_context *ce, struct i915_gem_ww_ctx *ww, void **vaddr) argument
3923 guc_virtual_context_pin(struct intel_context *ce, void *vaddr) argument
3936 guc_virtual_context_unpin(struct intel_context *ce) argument
3952 guc_virtual_context_enter(struct intel_context *ce) argument
3963 guc_virtual_context_exit(struct intel_context *ce) argument
3974 guc_virtual_context_alloc(struct intel_context *ce) argument
4007 guc_parent_context_pin(struct intel_context *ce, void *vaddr) argument
4023 guc_child_context_pin(struct intel_context *ce, void *vaddr) argument
4034 guc_parent_context_unpin(struct intel_context *ce) argument
4047 guc_child_context_unpin(struct intel_context *ce) argument
4057 guc_child_context_post_unpin(struct intel_context *ce) argument
4069 struct intel_context *ce = container_of(kref, typeof(*ce), ref); local
4147 struct intel_context *parent = NULL, *ce, *err; local
4261 struct intel_context *ce = request_to_scheduling_context(rq); local
4284 struct intel_context *ce = request_to_scheduling_context(rq); local
4372 guc_kernel_context_pin(struct intel_guc *guc, struct intel_context *ce) argument
4425 struct intel_context *ce; local
4798 struct intel_context *ce; local
4971 struct intel_context *ce; local
5022 struct intel_context *ce; local
5096 capture_error_state(struct intel_guc *guc, struct intel_context *ce) argument
5135 guc_context_replay(struct intel_context *ce) argument
5143 guc_handle_context_reset(struct intel_guc *guc, struct intel_context *ce) argument
5165 struct intel_context *ce; local
5307 struct intel_context *ce; local
5369 struct intel_context *ce; local
5440 guc_log_context_priority(struct drm_printer *p, struct intel_context *ce) argument
5455 guc_log_context(struct drm_printer *p, struct intel_context *ce) argument
5477 struct intel_context *ce; local
5521 get_children_go_addr(struct intel_context *ce) argument
5530 get_children_join_addr(struct intel_context *ce, u8 child_index) argument
5548 struct intel_context *ce = rq->context; local
5595 struct intel_context *ce = rq->context; local
5639 struct intel_context *ce = rq->context; local
5689 struct intel_context *ce = rq->context; local
5733 struct intel_context *ce = rq->context; local
5765 struct intel_context *ce = rq->context; local
[all...]
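
Two patterns dominate the intel_guc_submission.c hits: the lock nesting documented at lines 112-122 (sched_engine->lock or guc->submission_state.lock first, ce->guc_state.lock innermost) and the long run of set_/clr_/test helpers that flip bits in ce->guc_state.sched_state only while ce->guc_state.lock is held. A hedged sketch of one helper triple, mirroring the lockdep assertion visible in init_sched_state() at line 183 (SCHED_STATE_BANNED is assumed to be one of the real flag macros):

static bool context_banned(struct intel_context *ce)
{
        return ce->guc_state.sched_state & SCHED_STATE_BANNED;
}

static void set_context_banned(struct intel_context *ce)
{
        lockdep_assert_held(&ce->guc_state.lock);
        ce->guc_state.sched_state |= SCHED_STATE_BANNED;
}

static void clr_context_banned(struct intel_context *ce)
{
        lockdep_assert_held(&ce->guc_state.lock);
        ce->guc_state.sched_state &= ~SCHED_STATE_BANNED;
}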
/linux-master/net/bridge/netfilter/
ebtables.c
1715 struct ebt_entry __user *ce; local
1731 if (*size < sizeof(*ce))
1734 ce = *dstptr;
1735 if (copy_to_user(ce, e, sizeof(*ce)))
1739 *dstptr += sizeof(*ce);
1758 if (put_user(watchers_offset, &ce->watchers_offset) ||
1759 put_user(target_offset, &ce->target_offset) ||
1760 put_user(next_offset, &ce->next_offset))
1763 *size -= sizeof(*ce);
[all...]
/linux-master/fs/smb/client/
dfs_cache.c
108 static inline bool cache_entry_expired(const struct cache_entry *ce) argument
113 return timespec64_compare(&ts, &ce->etime) >= 0;
116 static inline void free_tgts(struct cache_entry *ce) argument
120 list_for_each_entry_safe(t, n, &ce->tlist, list) {
127 static inline void flush_cache_ent(struct cache_entry *ce) argument
129 hlist_del_init(&ce->hlist);
130 kfree(ce->path);
131 free_tgts(ce);
133 kmem_cache_free(cache_slab, ce);
143 struct cache_entry *ce; local
158 struct cache_entry *ce; local
226 dump_tgts(const struct cache_entry *ce) argument
237 dump_ce(const struct cache_entry *ce) argument
340 get_tgt_name(const struct cache_entry *ce) argument
382 copy_ref_data(const struct dfs_info3_param *refs, int numrefs, struct cache_entry *ce, const char *tgthint) argument
422 struct cache_entry *ce; local
447 struct cache_entry *ce; local
479 struct cache_entry *ce; local
537 struct cache_entry *ce; local
559 struct cache_entry *ce; local
620 update_cache_entry_locked(struct cache_entry *ce, const struct dfs_info3_param *refs, int numrefs) argument
689 struct cache_entry *ce; local
756 setup_referral(const char *path, struct cache_entry *ce, struct dfs_info3_param *ref, const char *target) argument
790 get_targets(struct cache_entry *ce, struct dfs_cache_tgt_list *tl) argument
862 struct cache_entry *ce; local
908 struct cache_entry *ce; local
949 struct cache_entry *ce; local
994 struct cache_entry *ce; local
1182 struct cache_entry *ce; local
[all...]
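
dfs_cache.c's cache_entry_expired() (line 108) is a plain expiry test: read a coarse wallclock timestamp and compare it against the entry's precomputed expiry time; flush_cache_ent() (line 127) then unhooks and frees entries that fail it. A hedged reconstruction of the test, assuming ce->etime was stamped when the referral was cached:

static inline bool cache_entry_expired(const struct cache_entry *ce)
{
        struct timespec64 ts;

        ktime_get_coarse_real_ts64(&ts);                /* cheap "now" */
        return timespec64_compare(&ts, &ce->etime) >= 0; /* now >= expiry */
}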
/linux-master/drivers/of/
dynamic.c
520 static void __of_changeset_entry_destroy(struct of_changeset_entry *ce) argument
522 if (ce->action == OF_RECONFIG_ATTACH_NODE &&
523 of_node_check_flag(ce->np, OF_OVERLAY)) {
524 if (kref_read(&ce->np->kobj.kref) > 1) {
526 kref_read(&ce->np->kobj.kref), ce->np);
528 of_node_set_flag(ce->np, OF_OVERLAY_FREE_CSET);
532 of_node_put(ce->np);
533 list_del(&ce->node);
534 kfree(ce);
537 __of_changeset_entry_invert(struct of_changeset_entry *ce, struct of_changeset_entry *rce) argument
567 __of_changeset_entry_notify(struct of_changeset_entry *ce, bool revert) argument
602 __of_changeset_entry_apply(struct of_changeset_entry *ce) argument
637 __of_changeset_entry_revert(struct of_changeset_entry *ce) argument
669 struct of_changeset_entry *ce, *cen; local
699 struct of_changeset_entry *ce; local
728 struct of_changeset_entry *ce; local
803 struct of_changeset_entry *ce; local
829 struct of_changeset_entry *ce; local
904 struct of_changeset_entry *ce; local
[all...]
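
The dynamic.c hits around line 520 show the changeset-entry teardown rule: an attached overlay node that still has outside references cannot be freed yet (the kref_read() check at line 524), otherwise it is flagged OF_OVERLAY_FREE_CSET so the overlay release path frees it; either way the entry drops its node reference, unlinks itself, and is kfree'd. A hedged reconstruction (the warning text is illustrative):

static void __of_changeset_entry_destroy(struct of_changeset_entry *ce)
{
        if (ce->action == OF_RECONFIG_ATTACH_NODE &&
            of_node_check_flag(ce->np, OF_OVERLAY)) {
                if (kref_read(&ce->np->kobj.kref) > 1)
                        pr_err("destroy cset entry: node %pOF still in use (%d refs)\n",
                               ce->np, kref_read(&ce->np->kobj.kref));
                else
                        of_node_set_flag(ce->np, OF_OVERLAY_FREE_CSET);
        }

        of_node_put(ce->np);    /* drop the entry's node reference */
        list_del(&ce->node);    /* unlink from the changeset */
        kfree(ce);
}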
/linux-master/kernel/printk/
printk.c
3701 initcall_entry_t *ce; local
3710 ce = __con_initcall_start;
3712 while (ce < __con_initcall_end) {
3713 call = initcall_from_entry(ce);
3717 ce++;
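
The printk.c hit walks the console-initcall linker section: every built-in console driver contributes an entry between __con_initcall_start and __con_initcall_end, and console_init() decodes each with initcall_from_entry() before invoking it. A hedged sketch of that section walk:

initcall_entry_t *ce;
initcall_t call;

for (ce = __con_initcall_start; ce < __con_initcall_end; ce++) {
        call = initcall_from_entry(ce); /* decode (possibly offset-relative) entry */
        call();                         /* register one console */
}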
/linux-master/drivers/clocksource/
timer-riscv.c
46 riscv_clock_next_event(unsigned long delta, struct clock_event_device *ce) argument
109 struct clock_event_device *ce = per_cpu_ptr(&riscv_clock_event, cpu); local
114 ce->cpumask = cpumask_of(cpu);
115 ce->irq = riscv_clock_event_irq;
117 ce->features |= CLOCK_EVT_FEAT_C3STOP;
119 ce->rating = 450;
120 clockevents_config_and_register(ce, riscv_timebase, 100, ULONG_MAX);
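
timer-riscv.c (and timer-clint.c below) follow the standard clockevent bring-up: populate the per-CPU struct clock_event_device, then call clockevents_config_and_register() with the timer frequency and the minimum/maximum programmable delta in ticks. A hedged sketch of the per-CPU setup shown at lines 109-120 (the C3STOP guard condition is assumed, since the test itself is not visible in the excerpt):

static int riscv_timer_starting_cpu(unsigned int cpu)
{
        struct clock_event_device *ce = per_cpu_ptr(&riscv_clock_event, cpu);

        ce->cpumask = cpumask_of(cpu);
        ce->irq = riscv_clock_event_irq;
        if (timer_cannot_wake_cpu)              /* assumed condition */
                ce->features |= CLOCK_EVT_FEAT_C3STOP;
        ce->rating = 450;
        /* freq = riscv_timebase, min delta 100 ticks, max ULONG_MAX */
        clockevents_config_and_register(ce, riscv_timebase, 100, ULONG_MAX);
        return 0;
}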
hyperv_timer.c
60 struct clock_event_device *ce; local
62 ce = this_cpu_ptr(hv_clock_event);
63 ce->event_handler(ce);
131 struct clock_event_device *ce; local
136 ce = per_cpu_ptr(hv_clock_event, cpu);
137 ce->name = "Hyper-V clockevent";
138 ce->features = CLOCK_EVT_FEAT_ONESHOT;
139 ce->cpumask = cpumask_of(cpu);
140 ce
157 struct clock_event_device *ce; local
[all...]
timer-clint.c
111 clint_clock_next_event(unsigned long delta, struct clock_event_device *ce) argument
131 struct clock_event_device *ce = per_cpu_ptr(&clint_clock_event, cpu); local
133 ce->cpumask = cpumask_of(cpu);
134 clockevents_config_and_register(ce, clint_timer_freq, 100, ULONG_MAX);
/linux-master/scripts/kconfig/
expr.h
281 struct expr *expr_alloc_one(enum expr_type type, struct expr *ce);
/linux-master/drivers/video/fbdev/core/
svgalib.c
306 u8 ce = 0x0e; local
341 vga_wcrt(regbase, 0x0B, ce); /* set cursor end */
/linux-master/drivers/soc/fsl/qbman/
qman.c
139 * ce == cache-enabled portal register
343 void *ce; /* cache-enabled */ member in struct:qm_addr
376 dpaa_invalidate(p->addr.ce + offset);
381 dpaa_touch_ro(p->addr.ce + offset);
430 eqcr->ring = portal->addr.ce + QM_CL_EQCR;
614 dqrr->ring = portal->addr.ce + QM_CL_DQRR;
768 mr->ring = portal->addr.ce + QM_CL_MR;
859 mc->cr = portal->addr.ce + QM_CL_CR;
860 mc->rr = portal->addr.ce + QM_CL_RR0;
1244 p->addr.ce
[all...]
/linux-master/include/linux/
hisi_acc_qm.h
240 u32 ce; member in struct:hisi_qm_err_info
/linux-master/fs/ext4/
xattr.c
1322 struct mb_cache_entry *ce; local
1325 ce = mb_cache_entry_get(ea_block_cache, hash,
1327 if (ce) {
1328 set_bit(MBE_REUSABLE_B, &ce->e_flags);
1329 mb_cache_entry_put(ea_block_cache, ce);
1521 struct mb_cache_entry *ce; local
1528 ce = mb_cache_entry_find_first(ea_inode_cache, hash);
1529 if (!ce)
1537 mb_cache_entry_put(ea_inode_cache, ce);
1541 while (ce) {
1925 struct mb_cache_entry *ce = NULL; local
3102 struct mb_cache_entry *ce; local
[all...]
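
The ext4 xattr.c hits (lines 1322-1329) show the mbcache dedup idiom used for shared xattr blocks: look the block up by content hash with mb_cache_entry_get(), mark it reusable so later writers may share it, and drop the reference with mb_cache_entry_put(); mb_cache_entry_find_first() (line 1528) starts the bucket walk when hunting for an existing identical block. A hedged sketch of the get/mark/put step (the block-number argument is an assumption):

struct mb_cache_entry *ce;

ce = mb_cache_entry_get(ea_block_cache, hash, bh->b_blocknr);
if (ce) {
        set_bit(MBE_REUSABLE_B, &ce->e_flags);  /* block may be shared again */
        mb_cache_entry_put(ea_block_cache, ce);
}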
/linux-master/drivers/crypto/hisilicon/zip/
zip_main.c
627 u32 nfe, ce; local
637 ce = hisi_qm_get_hw_info(qm, zip_basic_cap_info, ZIP_CE_MASK_CAP, qm->cap_ver);
640 writel(ce | nfe | HZIP_CORE_INT_RAS_FE_ENB_MASK, qm->io_base + HZIP_CORE_INT_SOURCE);
643 writel(ce, qm->io_base + HZIP_CORE_INT_RAS_CE_ENB);
655 u32 nfe, ce; local
659 ce = hisi_qm_get_hw_info(qm, zip_basic_cap_info, ZIP_CE_MASK_CAP, qm->cap_ver);
660 writel(ce | nfe | HZIP_CORE_INT_RAS_FE_ENB_MASK, qm->io_base + HZIP_CORE_INT_MASK_REG);
1111 err_info->ce = hisi_qm_get_hw_info(qm, zip_basic_cap_info, ZIP_QM_CE_MASK_CAP, qm->cap_ver);
/linux-master/drivers/crypto/hisilicon/sec2/
sec_main.c
661 u32 ce, nfe; local
669 ce = hisi_qm_get_hw_info(qm, sec_basic_info, SEC_CE_MASK_CAP, qm->cap_ver);
673 writel(ce | nfe | SEC_RAS_FE_ENB_MSK, qm->io_base + SEC_CORE_INT_SOURCE);
676 writel(ce, qm->io_base + SEC_RAS_CE_REG);
684 writel(ce | nfe | SEC_RAS_FE_ENB_MSK, qm->io_base + SEC_CORE_INT_MASK);
1035 err_info->ce = hisi_qm_get_hw_info(qm, sec_basic_info, SEC_QM_CE_MASK_CAP, qm->cap_ver);
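
zip_main.c and sec_main.c share the HiSilicon accelerator RAS bring-up shape: read the correctable (ce) and non-fatal (nfe) error masks from the device capability table via hisi_qm_get_hw_info(), clear stale error sources, then program the CE enable register. A hedged sketch of the ZIP variant from lines 627-643 (ZIP_NFE_MASK_CAP is an assumed capability id, by analogy with ZIP_CE_MASK_CAP above):

u32 nfe = hisi_qm_get_hw_info(qm, zip_basic_cap_info,
                              ZIP_NFE_MASK_CAP, qm->cap_ver); /* assumed id */
u32 ce  = hisi_qm_get_hw_info(qm, zip_basic_cap_info,
                              ZIP_CE_MASK_CAP, qm->cap_ver);

/* clear stale sources, then enable correctable-error reporting */
writel(ce | nfe | HZIP_CORE_INT_RAS_FE_ENB_MASK,
       qm->io_base + HZIP_CORE_INT_SOURCE);
writel(ce, qm->io_base + HZIP_CORE_INT_RAS_CE_ENB);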
