Searched refs:fctx (Results 1 - 25 of 33) sorted by relevance

/linux-master/drivers/gpu/drm/msm/
msm_fence.c
13 static struct msm_gpu *fctx2gpu(struct msm_fence_context *fctx) argument
15 struct msm_drm_private *priv = fctx->dev->dev_private;
21 struct msm_fence_context *fctx = container_of(t, local
24 kthread_queue_work(fctx2gpu(fctx)->worker, &fctx->deadline_work);
31 struct msm_fence_context *fctx = container_of(work, local
35 if (msm_fence_completed(fctx, fctx->next_deadline_fence))
38 msm_devfreq_boost(fctx2gpu(fctx), 2);
46 struct msm_fence_context *fctx; local
78 msm_fence_context_free(struct msm_fence_context *fctx) argument
83 msm_fence_completed(struct msm_fence_context *fctx, uint32_t fence) argument
94 msm_update_fence(struct msm_fence_context *fctx, uint32_t fence) argument
108 struct msm_fence_context *fctx; member in struct:msm_fence
136 struct msm_fence_context *fctx = f->fctx; local
188 msm_fence_init(struct dma_fence *fence, struct msm_fence_context *fctx) argument
[all...]
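Read together, the msm_fence.c hits sketch the fence-deadline boost path: a timer callback defers to the GPU's kthread worker via deadline_work, and the work handler boosts devfreq only if the fence the deadline was set against has not yet completed. A minimal sketch of that flow, assuming hrtimer semantics and the deadline_timer/deadline_work field names, which the excerpt does not fully show:

static enum hrtimer_restart deadline_timer(struct hrtimer *t)
{
	struct msm_fence_context *fctx = container_of(t,
			struct msm_fence_context, deadline_timer);

	/* Defer to process context; the devfreq boost below can sleep. */
	kthread_queue_work(fctx2gpu(fctx)->worker, &fctx->deadline_work);

	return HRTIMER_NORESTART;
}

static void deadline_work(struct kthread_work *work)
{
	struct msm_fence_context *fctx = container_of(work,
			struct msm_fence_context, deadline_work);

	/* Nothing to do if the deadline fence already signalled. */
	if (msm_fence_completed(fctx, fctx->next_deadline_fence))
		return;

	/* Otherwise raise GPU clocks to try to meet the deadline. */
	msm_devfreq_boost(fctx2gpu(fctx), 2);
}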
msm_fence.h
79 void msm_fence_context_free(struct msm_fence_context *fctx);
81 bool msm_fence_completed(struct msm_fence_context *fctx, uint32_t fence);
82 void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence);
85 void msm_fence_init(struct dma_fence *fence, struct msm_fence_context *fctx);
msm_ringbuffer.c
17 struct msm_fence_context *fctx = submit->ring->fctx; local
22 msm_fence_init(submit->hw_fence, fctx);
115 ring->fctx = msm_fence_context_alloc(gpu->dev, &ring->memptrs->fence, name);
131 msm_fence_context_free(ring->fctx);
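Taken with the msm_fence.h declarations above, the ringbuffer hits give the whole lifecycle of a ring's fence context. A condensed usage outline, with error handling and unrelated ring setup omitted:

/* Ring creation: one context per ring, backed by the ring's fence memptr. */
ring->fctx = msm_fence_context_alloc(gpu->dev, &ring->memptrs->fence, name);

/* Submit: bind the submit's hw_fence to the ring's context. */
msm_fence_init(submit->hw_fence, submit->ring->fctx);

/* Retire / recovery (see the msm_gpu.c hits below): publish the completed seqno. */
msm_update_fence(ring->fctx, ring->memptrs->fence);

/* Ring teardown. */
msm_fence_context_free(ring->fctx);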
msm_ringbuffer.h
72 struct msm_fence_context *fctx; member in struct:msm_ringbuffer
msm_gpu.c
419 msm_update_fence(ring->fctx, fence);
522 } else if (fence_before(fence, ring->fctx->last_fence) &&
532 gpu->name, ring->fctx->last_fence);
538 if (fence_after(ring->fctx->last_fence, ring->hangcheck_fence))
742 msm_update_fence(gpu->rb[i]->fctx, gpu->rb[i]->memptrs->fence);
/linux-master/drivers/gpu/drm/nouveau/
nouveau_fence.c
63 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); local
65 if (!--fctx->notify_ref)
84 nouveau_fence_context_kill(struct nouveau_fence_chan *fctx, int error) argument
89 spin_lock_irqsave(&fctx->lock, flags);
90 while (!list_empty(&fctx->pending)) {
91 fence = list_entry(fctx->pending.next, typeof(*fence), head);
97 nvif_event_block(&fctx->event);
99 fctx->killed = 1;
100 spin_unlock_irqrestore(&fctx->lock, flags);
104 nouveau_fence_context_del(struct nouveau_fence_chan *fctx) argument
125 nouveau_fence_context_free(struct nouveau_fence_chan *fctx) argument
131 nouveau_fence_update(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx) argument
152 struct nouveau_fence_chan *fctx = container_of(work, struct nouveau_fence_chan, local
176 struct nouveau_fence_chan *fctx = container_of(event, typeof(*fctx), event); local
182 nouveau_fence_context_new(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx) argument
223 struct nouveau_fence_chan *fctx = chan->fence; local
263 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); local
360 struct nouveau_fence_chan *fctx = chan->fence; local
461 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); local
475 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); local
516 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); local
534 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); local
[all...]
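The nouveau_fence_context_kill() hits show the teardown side of a channel's fence context: everything still pending is failed under fctx->lock, the completion event is blocked, and the context is flagged as killed so later emits can bail out. A sketch of that shape, assuming the per-fence error/signal/unlink handling elided from the excerpt:

void nouveau_fence_context_kill(struct nouveau_fence_chan *fctx, int error)
{
	struct nouveau_fence *fence;
	unsigned long flags;

	spin_lock_irqsave(&fctx->lock, flags);
	while (!list_empty(&fctx->pending)) {
		fence = list_entry(fctx->pending.next, typeof(*fence), head);

		/* Assumed detail: fail the fence, signal it, drop it from pending. */
		if (error)
			dma_fence_set_error(&fence->base, error);
		dma_fence_signal_locked(&fence->base);
		list_del_init(&fence->head);
	}

	nvif_event_block(&fctx->event);
	fctx->killed = 1;
	spin_unlock_irqrestore(&fctx->lock, flags);
}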
nv10_fence.c
61 struct nv10_fence_chan *fctx = chan->fence; local
62 nouveau_fence_context_del(&fctx->base);
63 nvif_object_dtor(&fctx->sema);
65 nouveau_fence_context_free(&fctx->base);
71 struct nv10_fence_chan *fctx; local
73 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
74 if (!fctx)
77 nouveau_fence_context_new(chan, &fctx->base);
78 fctx
[all...]
nv04_fence.c
70 struct nv04_fence_chan *fctx = chan->fence; local
71 nouveau_fence_context_del(&fctx->base);
73 nouveau_fence_context_free(&fctx->base);
79 struct nv04_fence_chan *fctx = kzalloc(sizeof(*fctx), GFP_KERNEL); local
80 if (fctx) {
81 nouveau_fence_context_new(chan, &fctx->base);
82 fctx->base.emit = nv04_fence_emit;
83 fctx->base.sync = nv04_fence_sync;
84 fctx
[all...]
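The nv04/nv10/nv17/nv50 hits all repeat one backend pattern: allocate the per-channel fence state, initialise the shared base with nouveau_fence_context_new(), then fill in the generation-specific hooks and hang the result off chan->fence. A sketch of that pattern (the function name is illustrative, and only the hooks visible in the excerpt are set):

static int
myfence_context_new(struct nouveau_channel *chan)
{
	struct nv04_fence_chan *fctx = kzalloc(sizeof(*fctx), GFP_KERNEL);

	if (!fctx)
		return -ENOMEM;

	nouveau_fence_context_new(chan, &fctx->base);

	/* Generation-specific hooks; nv10/nv50 additionally set .read and use a semaphore object. */
	fctx->base.emit = nv04_fence_emit;
	fctx->base.sync = nv04_fence_sync;

	chan->fence = fctx;
	return 0;
}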
nv50_fence.c
39 struct nv10_fence_chan *fctx; local
45 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
46 if (!fctx)
49 nouveau_fence_context_new(chan, &fctx->base);
50 fctx->base.emit = nv10_fence_emit;
51 fctx->base.read = nv10_fence_read;
52 fctx->base.sync = nv17_fence_sync;
62 &fctx->sema);
nv84_fence.c
89 struct nv84_fence_chan *fctx = chan->fence; local
90 u64 addr = fctx->vma->addr + nv84_fence_chid(chan) * 16;
92 return fctx->base.emit32(chan, addr, fence->base.seqno);
99 struct nv84_fence_chan *fctx = chan->fence; local
100 u64 addr = fctx->vma->addr + nv84_fence_chid(prev) * 16;
102 return fctx->base.sync32(chan, addr, fence->base.seqno);
116 struct nv84_fence_chan *fctx = chan->fence; local
118 nouveau_bo_wr32(priv->bo, nv84_fence_chid(chan) * 16 / 4, fctx->base.sequence);
120 nouveau_vma_del(&fctx->vma);
122 nouveau_fence_context_del(&fctx
131 struct nv84_fence_chan *fctx; local
[all...]
nv17_fence.c
41 struct nv10_fence_chan *fctx = chan->fence; local
57 PUSH_MTHD(ppush, NV176E, SET_CONTEXT_DMA_SEMAPHORE, fctx->sema.handle,
65 PUSH_MTHD(npush, NV176E, SET_CONTEXT_DMA_SEMAPHORE, fctx->sema.handle,
81 struct nv10_fence_chan *fctx; local
86 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
87 if (!fctx)
90 nouveau_fence_context_new(chan, &fctx->base);
91 fctx->base.emit = nv10_fence_emit;
92 fctx
[all...]
nvc0_fence.c
82 struct nv84_fence_chan *fctx = chan->fence; local
83 fctx->base.emit32 = nvc0_fence_emit32;
84 fctx->base.sync32 = nvc0_fence_sync32;
/linux-master/drivers/crypto/cavium/nitrox/
nitrox_aead.c
38 struct flexi_crypto_context *fctx; local
46 fctx = nctx->u.fctx;
47 flags.fu = be64_to_cpu(fctx->flags.f);
49 fctx->flags.f = cpu_to_be64(flags.fu);
52 memset(&fctx->crypto, 0, sizeof(fctx->crypto));
53 memcpy(fctx->crypto.u.key, key, keylen);
62 struct flexi_crypto_context *fctx = nctx->u.fctx; local
219 struct flexi_crypto_context *fctx = nctx->u.fctx; local
253 struct flexi_crypto_context *fctx = nctx->u.fctx; local
347 struct flexi_crypto_context *fctx = nctx->u.fctx; local
363 struct flexi_crypto_context *fctx = nctx->u.fctx; local
[all...]
nitrox_skcipher.c
153 struct flexi_crypto_context *fctx = nctx->u.fctx; local
155 memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
156 memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
171 struct flexi_crypto_context *fctx; local
184 fctx = nctx->u.fctx;
185 flags = &fctx->flags;
192 memcpy(fctx->crypto.u.key, key, keylen);
341 struct flexi_crypto_context *fctx; local
365 struct flexi_crypto_context *fctx; local
[all...]
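In both nitrox files fctx is the device-visible flexi_crypto_context: setkey round-trips the big-endian flags word through CPU order to update the cipher selection, then clears and copies the key material into fctx->crypto, while teardown memzero_explicit()s the crypto and auth key blocks. A condensed setkey-style sketch; the flags union type and its cipher field name are assumptions, as are the function signature and return value:

static int nitrox_program_cipher_key(struct nitrox_crypto_ctx *nctx,
				     const u8 *key, unsigned int keylen,
				     u64 cipher_type)
{
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	union fc_ctx_flags flags;               /* assumed union: .f (__be64) and .fu (u64) views */

	/* The context stores its flags big-endian; edit a CPU-order copy. */
	flags.fu = be64_to_cpu(fctx->flags.f);
	flags.w0.cipher_type = cipher_type;     /* assumed field name */
	fctx->flags.f = cpu_to_be64(flags.fu);

	/* Drop any previous key before installing the new one. */
	memset(&fctx->crypto, 0, sizeof(fctx->crypto));
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}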
/linux-master/drivers/crypto/cavium/cpt/
cptvf_algs.c
101 struct fc_context *fctx = &rctx->fctx; local
121 fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
122 fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
123 fctx->enc.enc_ctrl.e.iv_source = FROM_DPTR;
126 memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
128 memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);
129 ctrl_flags = (__be64 *)&fctx->enc.enc_ctrl.flags;
130 *ctrl_flags = cpu_to_be64(fctx->enc.enc_ctrl.flags);
142 req_info->in[*argcnt].vptr = (u8 *)fctx;
198 struct fc_context *fctx = &rctx->fctx; local
[all...]
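Here fctx is the per-request fc_context embedded in cvm_req_ctx (see the cptvf_algs.h hit below): request setup fills the encryption control word and key, byte-swaps the control flags for the device, and passes the whole structure to hardware as one input pointer. A condensed sketch of that step; the surrounding types, the XTS test, and the function name are assumptions:

static void cvm_fill_fc_context(struct cvm_req_ctx *rctx,
				struct cvm_enc_ctx *ctx,
				struct cpt_request_info *req_info, u32 *argcnt)
{
	struct fc_context *fctx = &rctx->fctx;
	__be64 *ctrl_flags;

	fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
	fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
	fctx->enc.enc_ctrl.e.iv_source = FROM_DPTR;

	/* XTS carries two keys back to back, so the copy doubles (assumed condition). */
	if (ctx->cipher_type == AES_XTS)
		memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
	else
		memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);

	/* The device reads the control word big-endian; swap it in place. */
	ctrl_flags = (__be64 *)&fctx->enc.enc_ctrl.flags;
	*ctrl_flags = cpu_to_be64(fctx->enc.enc_ctrl.flags);

	/* Hand the flexi context to hardware as one gather entry. */
	req_info->in[*argcnt].vptr = (u8 *)fctx;
}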
cptvf_algs.h
113 struct fc_context fctx; member in struct:cvm_req_ctx
/linux-master/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c
107 if (memcmp(rctx->fctx.hmac.s.hmac_calc,
108 rctx->fctx.hmac.s.hmac_recv,
240 struct otx_cpt_fc_ctx *fctx = &rctx->fctx; local
272 fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
273 fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
274 fctx->enc.enc_ctrl.e.iv_source = OTX_CPT_FROM_CPTR;
277 memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
279 memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);
281 memcpy(fctx
967 struct otx_cpt_fc_ctx *fctx = &rctx->fctx; local
[all...]
otx_cptvf_algs.h
162 struct otx_cpt_fc_ctx fctx; member in struct:otx_cpt_req_ctx
/linux-master/drivers/crypto/marvell/octeontx2/
otx2_cptvf_algs.c
92 if (memcmp(rctx->fctx.hmac.s.hmac_calc,
93 rctx->fctx.hmac.s.hmac_recv,
226 struct otx2_cpt_fc_ctx *fctx = &rctx->fctx; local
258 fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
259 fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
260 fctx->enc.enc_ctrl.e.iv_source = OTX2_CPT_FROM_CPTR;
263 memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
265 memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);
267 memcpy(fctx
1048 struct otx2_cpt_fc_ctx *fctx = &rctx->fctx; local
[all...]
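Both OcteonTX drivers keep the flexi context inside the request context and use its hmac block to check AEAD decryptions: hardware deposits the computed MAC in hmac_calc next to the received MAC in hmac_recv, and the completion path memcmp()s the two. A sketch of that check; the length argument and the -EBADMSG convention are assumptions not visible in the excerpt:

static int otx2_cpt_check_hmac(struct otx2_cpt_req_ctx *rctx,
			       unsigned int mac_len)
{
	/* Device-computed MAC vs. MAC carried in the ciphertext. */
	if (memcmp(rctx->fctx.hmac.s.hmac_calc,
		   rctx->fctx.hmac.s.hmac_recv, mac_len))
		return -EBADMSG;

	return 0;
}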
otx2_cptvf_algs.h
151 struct otx2_cpt_fc_ctx fctx; member in struct:otx2_cpt_req_ctx
/linux-master/security/apparmor/
file.c
463 static void update_file_ctx(struct aa_file_ctx *fctx, struct aa_label *label, argument
469 spin_lock(&fctx->lock);
470 old = rcu_dereference_protected(fctx->label,
471 lockdep_is_held(&fctx->lock));
475 rcu_assign_pointer(fctx->label, l);
479 fctx->allow |= request;
481 spin_unlock(&fctx->lock);
588 struct aa_file_ctx *fctx; local
596 fctx = file_ctx(file);
599 flabel = rcu_dereference(fctx
[all...]
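update_file_ctx() is AppArmor's cached-revalidation update: with fctx->lock held it fetches the current label via rcu_dereference_protected() (lockdep-checked against the same lock), publishes a replacement with rcu_assign_pointer(), and ORs the newly permitted requests into fctx->allow. A sketch of the locking shape, with the label merge and refcount handling between those two steps reduced to an assumed helper:

static void update_file_ctx(struct aa_file_ctx *fctx, struct aa_label *label,
			    u32 request)
{
	struct aa_label *l, *old;

	spin_lock(&fctx->lock);
	old = rcu_dereference_protected(fctx->label,
					lockdep_is_held(&fctx->lock));

	/* Assumed helper: combine the cached label with the one just used. */
	l = merge_labels(old, label);
	if (l && l != old)
		rcu_assign_pointer(fctx->label, l);

	/* Remember which requests have already been allowed on this file. */
	fctx->allow |= request;
	spin_unlock(&fctx->lock);
}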
/linux-master/fs/
userfaultfd.c
657 struct userfaultfd_fork_ctx *fctx; local
667 list_for_each_entry(fctx, fcs, list)
668 if (fctx->orig == octx) {
669 ctx = fctx->new;
674 fctx = kmalloc(sizeof(*fctx), GFP_KERNEL);
675 if (!fctx)
680 kfree(fctx);
697 fctx->orig = octx;
698 fctx
706 dup_fctx(struct userfaultfd_fork_ctx *fctx) argument
721 struct userfaultfd_fork_ctx *fctx, *n; local
[all...]
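In userfaultfd's fork path fctx pairs a parent context with the child context created for it, so multiple VMAs sharing one parent reuse the same child: the loop checks the per-fork list for an existing pairing and only allocates on a miss; the truncated tail of the hit then records the pairing and links it onto the list. A sketch of that lookup-or-allocate step, with child-context creation reduced to an assumed helper and the function name illustrative:

struct userfaultfd_fork_ctx {
	struct userfaultfd_ctx *orig;   /* parent context */
	struct userfaultfd_ctx *new;    /* child context created for this fork */
	struct list_head list;
};

static struct userfaultfd_ctx *
fork_ctx_lookup_or_create(struct userfaultfd_ctx *octx, struct list_head *fcs)
{
	struct userfaultfd_fork_ctx *fctx;
	struct userfaultfd_ctx *ctx;

	/* Reuse a child already created for this parent during this fork. */
	list_for_each_entry(fctx, fcs, list)
		if (fctx->orig == octx)
			return fctx->new;

	fctx = kmalloc(sizeof(*fctx), GFP_KERNEL);
	if (!fctx)
		return ERR_PTR(-ENOMEM);

	ctx = alloc_child_ctx(octx);            /* assumed helper */
	if (IS_ERR(ctx)) {
		kfree(fctx);
		return ctx;
	}

	fctx->orig = octx;
	fctx->new = ctx;
	list_add_tail(&fctx->list, fcs);
	return ctx;
}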
/linux-master/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv04.c
45 struct nvkm_memory *fctx = device->imem->ramfc; local
63 nvkm_kmap(fctx);
68 u32 cv = (nvkm_ro32(fctx, c->ctxp + data) & ~cm);
69 nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs));
71 nvkm_done(fctx);
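In the nv04 FIFO code fctx is not a fence context at all but the RAMFC instance memory, and the hit is a plain nvkm_memory read-modify-write: map, read a 32-bit word, mask out a field, OR in the shifted new value, write it back, unmap. The same access in isolation, wrapped in an illustrative helper with illustrative parameter names:

/* Update one bitfield of a 32-bit word inside RAMFC. */
static void ramfc_update_field(struct nvkm_memory *fctx, u32 offset,
			       u32 mask, u32 shift, u32 value)
{
	u32 cv;

	nvkm_kmap(fctx);                        /* map the instance memory */
	cv = nvkm_ro32(fctx, offset) & ~mask;   /* read and clear the field */
	nvkm_wo32(fctx, offset, cv | (value << shift));
	nvkm_done(fctx);                        /* flush and unmap */
}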
/linux-master/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
577 ring->memptrs->bv_fence = ring->fctx->completed_fence;
583 if (fence_before(ring->fctx->last_fence, ring->memptrs->fence)) {
584 ring->memptrs->fence = ring->fctx->last_fence;
675 state->ring[i].seqno = gpu->rb[i]->fctx->last_fence;
929 ring->fctx->last_fence);
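The adreno and msm_gpu hits compare ring seqnos with fence_before()/fence_after() rather than plain < and >, because the 32-bit fence counters are allowed to wrap. A sketch of the usual wraparound-safe comparison such helpers rely on (the in-tree definitions live in the msm headers and may differ in detail):

/* True if seqno a was issued after b, treating the 32-bit counter as circular. */
static inline bool fence_after(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) > 0;
}

static inline bool fence_before(uint32_t a, uint32_t b)
{
	return fence_after(b, a);
}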
/linux-master/drivers/block/
rbd.c
2499 struct rbd_img_fill_ctx *fctx)
2504 img_req->data_type = fctx->pos_type;
2510 fctx->iter = *fctx->pos;
2517 fctx->set_pos_fn, &fctx->iter);
2530 * @fctx->pos data buffer.
2534 * different chunks of @fctx->pos data buffer.
2536 * @fctx->pos data buffer is assumed to be large enough.
2541 struct rbd_img_fill_ctx *fctx)
2496 rbd_img_fill_request_nocopy(struct rbd_img_request *img_req, struct ceph_file_extent *img_extents, u32 num_img_extents, struct rbd_img_fill_ctx *fctx) argument
2538 rbd_img_fill_request(struct rbd_img_request *img_req, struct ceph_file_extent *img_extents, u32 num_img_extents, struct rbd_img_fill_ctx *fctx) argument
2605 struct rbd_img_fill_ctx fctx = { local
2655 struct rbd_img_fill_ctx fctx = { local
2715 struct rbd_img_fill_ctx fctx = { local
[all...]
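For rbd, fctx is a small on-stack descriptor that tells the generic fill code how to walk the caller's data buffer: pos_type becomes the request's data_type, pos seeds the iter copy, and set_pos_fn is invoked per object extent against &fctx->iter. A sketch of a caller building one, using only the fields visible in the excerpt; the bio-specific type, constant, and callback names are illustrative:

/* Illustrative caller: describe a bio-backed data buffer to the fill code. */
struct rbd_img_fill_ctx fctx = {
	.pos_type = OBJ_REQUEST_BIO,                /* copied into img_req->data_type */
	.pos = (union rbd_img_fill_iter *)&bio_pos, /* start position in the caller's buffer */
	.set_pos_fn = set_bio_pos,                  /* called per object extent with &fctx.iter */
};

ret = rbd_img_fill_request(img_req, img_extents, num_img_extents, &fctx);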

Completed in 224 milliseconds
