Searched refs:ce (Results 126 - 150 of 212) sorted by relevance


/linux-master/drivers/gpu/drm/i915/gt/uc/
intel_gsc_fw.c
239 struct intel_context *ce = gsc->ce; local
243 if (!ce)
246 rq = i915_request_create(ce);
250 if (ce->engine->emit_init_breadcrumb) {
251 err = ce->engine->emit_init_breadcrumb(rq);
260 err = ce->engine->emit_flush(rq, 0);
/linux-master/drivers/gpu/drm/i915/gt/
selftest_hangcheck.c
364 struct intel_context *ce; local
367 ce = intel_context_create(engine);
368 if (IS_ERR(ce)) {
369 err = PTR_ERR(ce);
377 rq = intel_context_create_request(ce);
388 intel_context_put(ce);
438 struct intel_context *ce; local
450 ce = intel_context_create(engine);
451 if (IS_ERR(ce)) {
452 pr_err("[%s] Create context failed: %pe!\n", engine->name, ce);
558 struct intel_context *ce; local
897 struct intel_context *ce[ARRAY_SIZE(rq)]; local
[all...]
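
The i915 hits above and below all follow the same context lifecycle: create an intel_context on an engine, build a request against it, then drop the reference. A minimal sketch of that pattern, assuming a valid engine pointer; submit_nop_on_engine() is a hypothetical name, not a function in the tree:

/* Sketch only: mirrors the intel_context_create() / intel_context_create_request() /
 * intel_context_put() sequence seen in selftest_hangcheck.c and i915_gem_evict.c. */
#include <linux/err.h>
#include "gt/intel_context.h"
#include "i915_request.h"

static int submit_nop_on_engine(struct intel_engine_cs *engine)
{
	struct intel_context *ce;
	struct i915_request *rq;
	int err = 0;

	ce = intel_context_create(engine);	/* new context bound to this engine */
	if (IS_ERR(ce))
		return PTR_ERR(ce);

	rq = intel_context_create_request(ce);	/* pins ce and allocates a request */
	if (IS_ERR(rq)) {
		err = PTR_ERR(rq);
		goto out;
	}
	i915_request_add(rq);			/* commit the (empty) request */
out:
	intel_context_put(ce);			/* drop our context reference */
	return err;
}
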
intel_engine_cs.c
1301 static int measure_breadcrumb_dw(struct intel_context *ce) argument
1303 struct intel_engine_cs *engine = ce->engine;
1315 frame->rq.context = ce;
1316 rcu_assign_pointer(frame->rq.timeline, ce->timeline);
1317 frame->rq.hwsp_seqno = ce->timeline->hwsp_seqno;
1327 mutex_lock(&ce->timeline->mutex);
1333 mutex_unlock(&ce->timeline->mutex);
1349 struct intel_context *ce; local
1352 ce = intel_context_create(engine);
1353 if (IS_ERR(ce))
1383 intel_engine_destroy_pinned_context(struct intel_context *ce) argument
1436 struct intel_context *ce, *bce = NULL; local
2508 intel_engine_get_hung_entity(struct intel_engine_cs *engine, struct intel_context **ce, struct i915_request **rq) argument
[all...]
selftest_engine_heartbeat.c
206 struct intel_context *ce; local
213 ce = intel_context_create(engine);
214 if (IS_ERR(ce))
215 return PTR_ERR(ce);
281 intel_context_put(ce);
intel_ggtt.c
299 struct intel_context *ce; local
305 ce = gt->engine[BCS0]->bind_context;
306 GEM_BUG_ON(!ce);
318 intel_engine_pm_get(ce->engine);
320 return ce;
323 static void gen8_ggtt_bind_put_ce(struct intel_context *ce, intel_wakeref_t wakeref) argument
325 intel_engine_pm_put(ce->engine);
326 intel_gt_pm_put(ce->engine->gt, wakeref);
338 struct intel_context *ce; local
345 ce
[all...]
intel_engine.h
256 struct intel_context **ce, struct i915_request **rq);
267 void intel_engine_destroy_pinned_context(struct intel_context *ce);
338 struct intel_context *ce)
340 engine->hung_ce = ce;
337 intel_engine_set_hung_context(struct intel_engine_cs *engine, struct intel_context *ce) argument
intel_gt.c
545 struct intel_context *ce; local
551 ce = intel_context_create(engine);
552 if (IS_ERR(ce)) {
553 err = PTR_ERR(ce);
557 err = intel_renderstate_init(&so, ce);
561 rq = i915_request_create(ce);
579 intel_renderstate_fini(&so, ce);
582 intel_context_put(ce);
629 struct intel_context *ce; local
636 ce
[all...]
/linux-master/drivers/gpu/drm/i915/
i915_request.c
300 struct intel_context *ce = rq->context; local
302 if (!ce->watchdog.timeout_us)
310 ns_to_ktime(ce->watchdog.timeout_us *
894 __i915_request_create(struct intel_context *ce, gfp_t gfp) argument
896 struct intel_timeline *tl = ce->timeline;
904 __intel_context_pin(ce);
938 rq = request_alloc_slow(tl, &ce->engine->request_pool, gfp);
945 rq->context = ce;
946 rq->engine = ce->engine;
947 rq->ring = ce
1028 i915_request_create(struct intel_context *ce) argument
[all...]
/linux-master/drivers/gpu/drm/i915/gem/selftests/
i915_gem_migrate.c
393 struct intel_context *ce; local
397 ce = intel_context_create(engine);
398 if (IS_ERR(ce)) {
399 err = PTR_ERR(ce);
411 rq = igt_spinner_create_request(&spin, ce, MI_NOOP);
412 intel_context_put(ce);
i915_gem_dmabuf.c
154 struct intel_context *ce; local
171 for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
172 if (intel_engine_can_store_dword(ce->engine))
176 if (!ce)
179 vma = i915_vma_instance(import_obj, ce->vm, NULL);
189 err = igt_gpu_fill_dw(ce, vma, 0,
/linux-master/drivers/gpu/drm/nouveau/nvkm/engine/device/
priv.h
34 #include <engine/ce.h>
base.c
1161 .ce = { 0x00000001, gt215_ce_new },
1195 .ce = { 0x00000001, gt215_ce_new },
1228 .ce = { 0x00000001, gt215_ce_new },
1325 .ce = { 0x00000001, gt215_ce_new },
1361 .ce = { 0x00000003, gf100_ce_new },
1397 .ce = { 0x00000001, gf100_ce_new },
1433 .ce = { 0x00000001, gf100_ce_new },
1469 .ce = { 0x00000003, gf100_ce_new },
1505 .ce = { 0x00000003, gf100_ce_new },
1541 .ce
[all...]
/linux-master/drivers/gpu/drm/i915/selftests/
i915_gem.c
24 struct intel_context *ce; local
27 for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
30 rq = intel_context_create_request(ce);
i915_gem_evict.c
456 struct intel_context *ce; local
459 ce = intel_context_create(engine);
460 if (IS_ERR(ce))
465 rq = intel_context_create_request(ce);
467 intel_context_put(ce);
/linux-master/arch/sparc/kernel/
leon_kernel.c
282 struct clock_event_device *ce; local
290 ce = &per_cpu(sparc32_clockevent, cpu);
293 if (ce->event_handler)
294 ce->event_handler(ce);
/linux-master/drivers/soc/fsl/qbman/
bman.c
87 * ce == cache-enabled portal register
179 void *ce; /* cache-enabled */ member in struct:bm_addr
204 dpaa_invalidate(p->addr.ce + offset);
209 dpaa_touch_ro(p->addr.ce + offset);
387 rcr->ring = portal->addr.ce + BM_CL_RCR;
432 mc->cr = portal->addr.ce + BM_CL_CR;
433 mc->rr = portal->addr.ce + BM_CL_RR0;
538 p->addr.ce = c->addr_virt_ce;
/linux-master/fs/ext2/
xattr.c
941 struct mb_cache_entry *ce; local
948 ce = mb_cache_entry_find_first(ea_block_cache, hash);
949 while (ce) {
952 bh = sb_bread(inode->i_sb, ce->e_value);
956 inode->i_ino, (unsigned long) ce->e_value);
962 (unsigned long) ce->e_value,
968 mb_cache_entry_touch(ea_block_cache, ce);
969 mb_cache_entry_put(ea_block_cache, ce);
975 ce = mb_cache_entry_find_next(ea_block_cache, ce);
[all...]
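
The ext2 hit above (and the ext4 one further down) shows the mb_cache walk used to share xattr blocks: look up candidates by hash, read each block, and release the entry reference when done. A rough sketch of that loop, assuming an existing cache and superblock; find_cached_xattr_block() is a made-up helper, and the real code also re-validates the block contents before reusing it:

#include <linux/mbcache.h>
#include <linux/buffer_head.h>

static struct buffer_head *find_cached_xattr_block(struct super_block *sb,
						   struct mb_cache *cache,
						   u32 hash)
{
	struct mb_cache_entry *ce;

	ce = mb_cache_entry_find_first(cache, hash);
	while (ce) {
		struct buffer_head *bh = sb_bread(sb, ce->e_value);

		if (bh) {
			mb_cache_entry_touch(cache, ce);	/* mark recently used */
			mb_cache_entry_put(cache, ce);		/* drop lookup reference */
			return bh;
		}
		ce = mb_cache_entry_find_next(cache, ce);	/* releases the previous entry */
	}
	return NULL;
}
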
/linux-master/tools/testing/selftests/powerpc/nx-gzip/include/
crb.h
79 __u8 ce; member in struct:coprocessor_status_block
/linux-master/arch/powerpc/include/asm/
icswx.h
90 u8 ce; member in struct:coprocessor_status_block
/linux-master/fs/ext4/
xattr.c
1322 struct mb_cache_entry *ce; local
1325 ce = mb_cache_entry_get(ea_block_cache, hash,
1327 if (ce) {
1328 set_bit(MBE_REUSABLE_B, &ce->e_flags);
1329 mb_cache_entry_put(ea_block_cache, ce);
1521 struct mb_cache_entry *ce; local
1528 ce = mb_cache_entry_find_first(ea_inode_cache, hash);
1529 if (!ce)
1537 mb_cache_entry_put(ea_inode_cache, ce);
1541 while (ce) {
1897 struct mb_cache_entry *ce = NULL; local
3111 struct mb_cache_entry *ce; local
[all...]
/linux-master/drivers/net/wireless/ath/ath10k/
snoc.c
19 #include "ce.h"
495 struct ath10k_ce *ce = ath10k_ce_priv(ar); local
517 spin_lock_bh(&ce->ce_lock);
519 spin_unlock_bh(&ce->ce_lock);
533 struct ath10k_ce *ce = ath10k_ce_priv(ar); local
544 spin_lock_bh(&ce->ce_lock);
546 spin_unlock_bh(&ce->ce_lock);
601 ath10k_dbg(ar, ATH10K_DBG_SNOC, "snoc rx ce pipe %d len %d\n",
690 struct ath10k_ce *ce = ath10k_ce_priv(ar); local
697 spin_lock_bh(&ce
1389 struct ath10k_ce *ce = ath10k_ce_priv(ar); local
[all...]
ce.h
163 * ce - which copy engine to use
378 struct ath10k_ce *ce = ath10k_ce_priv(ar); local
381 ce->bus_ops->read32((ar), CE_WRAPPER_BASE_ADDRESS +
/linux-master/drivers/mtd/nand/raw/
lpc32xx_slc.c
410 uint32_t ce = ecc[i / 3]; local
411 ce = ~(ce << 2) & 0xFFFFFF;
412 spare[i + 2] = (uint8_t)(ce & 0xFF);
413 ce >>= 8;
414 spare[i + 1] = (uint8_t)(ce & 0xFF);
415 ce >>= 8;
416 spare[i] = (uint8_t)(ce & 0xFF);
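
The lpc32xx_slc.c hit packs each hardware ECC word into three OOB bytes: shift left by two, invert, mask to 24 bits, then store most-significant byte first. A self-contained sketch of just that transformation; pack_slc_ecc() is an illustrative name, not a function in the driver:

#include <linux/types.h>

static void pack_slc_ecc(u32 ecc_word, u8 spare[3])
{
	u32 ce = ~(ecc_word << 2) & 0xFFFFFF;	/* shift up, invert, keep 24 bits */

	spare[2] = (u8)(ce & 0xFF);		/* least significant byte */
	ce >>= 8;
	spare[1] = (u8)(ce & 0xFF);
	ce >>= 8;
	spare[0] = (u8)(ce & 0xFF);		/* most significant byte */
}
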
/linux-master/drivers/gpu/drm/nouveau/include/nvkm/core/
layout.h
32 NVKM_LAYOUT_INST(NVKM_ENGINE_CE , struct nvkm_engine , ce, 10)
/linux-master/drivers/gpu/drm/i915/gvt/
scheduler.c
345 shadow_context_descriptor_update(struct intel_context *ce, argument
348 u64 desc = ce->lrc.desc;
358 ce->lrc.desc = desc;
435 struct intel_context *ce)
438 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);
1398 struct intel_context *ce; local
1403 ce = intel_context_create(engine);
1404 if (IS_ERR(ce)) {
1405 ret = PTR_ERR(ce);
1409 i915_vm_put(ce
434 set_context_ppgtt_from_shadow(struct intel_vgpu_workload *workload, struct intel_context *ce) argument
[all...]

Completed in 288 milliseconds
