Searched refs:rctx (Results 1 - 25 of 89) sorted by relevance

/linux-master/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c
28 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req); local
34 if (rctx->hash_rem) {
36 unsigned int offset = rctx->nbytes - rctx->hash_rem;
38 scatterwalk_map_and_copy(rctx->buf, rctx->src,
39 offset, rctx->hash_rem, 0);
40 rctx->buf_count = rctx->hash_rem;
42 rctx
60 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req); local
185 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req); local
222 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req); local
241 struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx_dma(req); local
[all...]
ccp-crypto-sha.c
31 struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req); local
37 if (rctx->hash_rem) {
39 unsigned int offset = rctx->nbytes - rctx->hash_rem;
41 scatterwalk_map_and_copy(rctx->buf, rctx->src,
42 offset, rctx->hash_rem, 0);
43 rctx->buf_count = rctx->hash_rem;
45 rctx
63 struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req); local
186 struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req); local
234 struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req); local
255 struct ccp_sha_req_ctx *rctx = ahash_request_ctx_dma(req); local
[all...]
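
Both CCP entries above end with the same bookkeeping step: whatever the engine left unhashed is copied out of the source scatterlist into the request context's staging buffer so the next update can prepend it. A minimal sketch of that step follows, assuming a hypothetical demo_ccp_hash_rctx layout; only scatterwalk_map_and_copy() is the kernel's API.

#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

struct demo_ccp_hash_rctx {
        struct scatterlist *src;        /* original request source */
        unsigned int nbytes;            /* bytes submitted in this pass */
        unsigned int hash_rem;          /* bytes the engine did not consume */
        unsigned int buf_count;         /* bytes now staged in buf[] */
        u8 buf[64];                     /* carry-over data for the next update */
};

/* Save the unconsumed tail of the source scatterlist, as in the
 * ccp-crypto-aes-cmac.c and ccp-crypto-sha.c snippets above. */
static void demo_ccp_save_remainder(struct demo_ccp_hash_rctx *rctx)
{
        if (rctx->hash_rem) {
                unsigned int offset = rctx->nbytes - rctx->hash_rem;

                scatterwalk_map_and_copy(rctx->buf, rctx->src,
                                         offset, rctx->hash_rem, 0);
                rctx->buf_count = rctx->hash_rem;
        }
}
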
ccp-crypto-aes-galois.c
80 struct ccp_aes_req_ctx *rctx = aead_request_ctx_dma(req); local
105 memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
107 rctx->iv[i + GCM_AES_IV_SIZE] = 0;
108 rctx->iv[AES_BLOCK_SIZE - 1] = 1;
111 iv_sg = &rctx->iv_sg;
113 sg_init_one(iv_sg, rctx->iv, iv_len);
116 memset(&rctx->cmd, 0, sizeof(rctx->cmd));
117 INIT_LIST_HEAD(&rctx->cmd.entry);
118 rctx
[all...]
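
The galois entry above is building the initial GCM counter block: the 12-byte nonce is copied in, the remaining bytes are zeroed, the trailing 32-bit counter is set to 1, and the block is wrapped in a one-entry scatterlist. A sketch of just that construction; the helper name is hypothetical, the constants and calls are the kernel's.

#include <linux/string.h>
#include <linux/scatterlist.h>
#include <crypto/aes.h>         /* AES_BLOCK_SIZE */
#include <crypto/gcm.h>         /* GCM_AES_IV_SIZE */

/* Expand a 12-byte GCM nonce into the 16-byte J0 counter block and
 * describe it with a single scatterlist entry. */
static void demo_gcm_build_j0(u8 iv[AES_BLOCK_SIZE],
                              struct scatterlist *iv_sg, const u8 *nonce)
{
        int i;

        memcpy(iv, nonce, GCM_AES_IV_SIZE);
        for (i = 0; i < AES_BLOCK_SIZE - GCM_AES_IV_SIZE - 1; i++)
                iv[i + GCM_AES_IV_SIZE] = 0;
        iv[AES_BLOCK_SIZE - 1] = 1;     /* big-endian 32-bit counter = 1 */

        sg_init_one(iv_sg, iv, AES_BLOCK_SIZE);
}
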
ccp-crypto-des3.c
26 struct ccp_des3_req_ctx *rctx = skcipher_request_ctx_dma(req); local
32 memcpy(req->iv, rctx->iv, DES3_EDE_BLOCK_SIZE);
65 struct ccp_des3_req_ctx *rctx = skcipher_request_ctx_dma(req); local
81 memcpy(rctx->iv, req->iv, DES3_EDE_BLOCK_SIZE);
82 iv_sg = &rctx->iv_sg;
84 sg_init_one(iv_sg, rctx->iv, iv_len);
87 memset(&rctx->cmd, 0, sizeof(rctx->cmd));
88 INIT_LIST_HEAD(&rctx->cmd.entry);
89 rctx
[all...]
ccp-crypto-aes-xts.c
65 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); local
70 memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
109 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); local
151 skcipher_request_set_tfm(&rctx->fallback_req,
153 skcipher_request_set_callback(&rctx->fallback_req,
157 skcipher_request_set_crypt(&rctx->fallback_req, req->src,
159 ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
160 crypto_skcipher_decrypt(&rctx->fallback_req);
164 memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
165 sg_init_one(&rctx
[all...]
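
The aes-xts entry above (and the aes.c, rk3288_crypto_skcipher.c and sl3516-ce-cipher.c entries below) uses the same fallback idiom: a struct skcipher_request for a software fallback sits at the end of the driver's request context, and the original request's parameters are forwarded to it. A minimal sketch under hypothetical demo_* names; the skcipher_* calls are the kernel's, and the driver's request size must also cover the fallback's reqsize for this layout to work.

#include <crypto/internal/skcipher.h>

struct demo_cipher_reqctx {
        u8 iv[16];
        struct skcipher_request fallback_req;   /* must be last */
};

/* Hand the whole request to the software fallback transform. */
static int demo_cipher_do_fallback(struct skcipher_request *req,
                                   struct crypto_skcipher *fallback_tfm,
                                   bool encrypt)
{
        struct demo_cipher_reqctx *rctx = skcipher_request_ctx(req);

        skcipher_request_set_tfm(&rctx->fallback_req, fallback_tfm);
        skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
                                      req->base.complete, req->base.data);
        skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
                                   req->cryptlen, req->iv);

        return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
                         crypto_skcipher_decrypt(&rctx->fallback_req);
}
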
ccp-crypto-aes.c
27 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); local
33 memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
70 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); local
86 memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
87 iv_sg = &rctx->iv_sg;
89 sg_init_one(iv_sg, rctx->iv, iv_len);
92 memset(&rctx->cmd, 0, sizeof(rctx->cmd));
93 INIT_LIST_HEAD(&rctx->cmd.entry);
94 rctx
136 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); local
162 struct ccp_aes_req_ctx *rctx = skcipher_request_ctx_dma(req); local
[all...]
/linux-master/drivers/crypto/aspeed/
aspeed-hace-hash.c
78 struct aspeed_sham_reqctx *rctx)
83 AHASH_DBG(hace_dev, "rctx flags:0x%x\n", (u32)rctx->flags);
85 switch (rctx->flags & SHA_FLAGS_MASK) {
89 bits[0] = cpu_to_be64(rctx->digcnt[0] << 3);
90 index = rctx->bufcnt & 0x3f;
92 *(rctx->buffer + rctx->bufcnt) = 0x80;
93 memset(rctx->buffer + rctx
77 aspeed_ahash_fill_padding(struct aspeed_hace_dev *hace_dev, struct aspeed_sham_reqctx *rctx) argument
119 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
168 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
293 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
316 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
350 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
409 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
457 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
488 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
505 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
536 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
583 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
624 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
638 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
663 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
835 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
844 struct aspeed_sham_reqctx *rctx = ahash_request_ctx(req); local
[all...]
/linux-master/drivers/crypto/qce/
aead.c
27 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
51 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
53 if (IS_CCM(rctx->flags)) {
55 sg_free_table(&rctx->src_tbl);
57 sg_free_table(&rctx->dst_tbl);
59 if (!(IS_DECRYPT(rctx->flags) && !diff_dst))
60 sg_free_table(&rctx
95 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
106 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
115 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
189 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
303 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
331 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
361 qce_aead_create_ccm_nonce(struct qce_aead_reqctx *rctx, struct qce_aead_ctx *ctx) argument
411 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
505 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
[all...]
sha.c
41 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
53 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
54 dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
56 memcpy(rctx->digest, result->auth_iv, digestsize);
57 if (req->result && rctx->last_blk)
60 rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
61 rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);
67 req->src = rctx->src_orig;
68 req->nbytes = rctx->nbytes_orig;
69 rctx
78 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
135 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
150 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
167 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
186 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
278 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
305 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
[all...]
skcipher.c
31 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); local
50 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
51 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
53 sg_free_table(&rctx->dst_tbl);
59 memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);
67 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); local
77 rctx
260 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); local
[all...]
common.c
151 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req); local
162 if (!rctx->last_blk && req->nbytes % blocksize)
167 if (IS_CMAC(rctx->flags)) {
175 auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);
178 if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
179 u32 authkey_words = rctx->authklen / sizeof(u32);
181 qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx
317 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); local
423 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req); local
[all...]
/linux-master/drivers/crypto/intel/keembay/
keembay-ocs-hcu-core.c
115 static inline unsigned int kmb_get_total_data(struct ocs_hcu_rctx *rctx) argument
117 return rctx->sg_data_total + rctx->buf_cnt;
121 static int flush_sg_to_ocs_buffer(struct ocs_hcu_rctx *rctx) argument
125 if (rctx->sg_data_total > (sizeof(rctx->buffer) - rctx->buf_cnt)) {
130 while (rctx->sg_data_total) {
131 if (!rctx->sg) {
139 if (rctx
187 kmb_ocs_hcu_dma_cleanup(struct ahash_request *req, struct ocs_hcu_rctx *rctx) argument
229 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
359 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
377 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
417 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
564 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
617 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
653 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
690 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
701 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
729 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
739 struct ocs_hcu_rctx *rctx = ahash_request_ctx_dma(req); local
[all...]
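
flush_sg_to_ocs_buffer() above drains the data still pending in the scatterlist into the request context's linear buffer, bailing out if it would not fit. The sketch below compresses that into a single sg_pcopy_to_buffer() call instead of the driver's manual scatterlist walk; the struct and field names are illustrative.

#include <linux/errno.h>
#include <linux/scatterlist.h>

struct demo_hcu_rctx {
        struct scatterlist *sg;         /* current position in req->src */
        unsigned int sg_nents;          /* entries left in the list */
        unsigned int sg_data_offset;    /* bytes already consumed from sg */
        unsigned int sg_data_total;     /* bytes still pending in sg */
        unsigned int buf_cnt;           /* bytes already staged in buffer[] */
        u8 buffer[128];
};

static int demo_flush_sg_to_buffer(struct demo_hcu_rctx *rctx)
{
        size_t copied;

        if (rctx->sg_data_total > sizeof(rctx->buffer) - rctx->buf_cnt)
                return -EINVAL;

        copied = sg_pcopy_to_buffer(rctx->sg, rctx->sg_nents,
                                    rctx->buffer + rctx->buf_cnt,
                                    rctx->sg_data_total,
                                    rctx->sg_data_offset);
        rctx->buf_cnt += copied;
        rctx->sg_data_total -= copied;

        return 0;
}
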
keembay-ocs-aes-core.c
240 static void ocs_aes_init_rctx(struct ocs_aes_rctx *rctx) argument
243 memset(rctx, 0, sizeof(*rctx));
246 rctx->src_dll.dma_addr = DMA_MAPPING_ERROR;
247 rctx->dst_dll.dma_addr = DMA_MAPPING_ERROR;
248 rctx->aad_src_dll.dma_addr = DMA_MAPPING_ERROR;
249 rctx->aad_dst_dll.dma_addr = DMA_MAPPING_ERROR;
314 struct ocs_aes_rctx *rctx = skcipher_request_ctx(req); local
357 ocs_aes_init_rctx(rctx);
358 rctx
377 struct ocs_aes_rctx *rctx = skcipher_request_ctx(req); local
401 struct ocs_aes_rctx *rctx = skcipher_request_ctx(req); local
450 struct ocs_aes_rctx *rctx = skcipher_request_ctx(req); local
517 struct ocs_aes_rctx *rctx = skcipher_request_ctx(req); local
641 struct ocs_aes_rctx *rctx = aead_request_ctx(req); local
685 struct ocs_aes_rctx *rctx = aead_request_ctx(req); local
723 struct ocs_aes_rctx *rctx = aead_request_ctx(req); local
891 struct ocs_aes_rctx *rctx = aead_request_ctx(req); local
[all...]
/linux-master/drivers/crypto/tegra/
tegra-se-aes.c
100 struct tegra_aes_reqctx *rctx = skcipher_request_ctx(req); local
105 if (rctx->encrypt)
106 memcpy(req->iv, rctx->datbuf.buf + offset, ctx->ivsize);
203 struct tegra_aes_reqctx *rctx)
208 dma_addr_t addr = rctx->datbuf.addr;
210 data_count = rctx->len / AES_BLOCK_SIZE;
211 res_bits = (rctx->len % AES_BLOCK_SIZE) * 8;
220 if (rctx->iv) {
224 cpuvaddr[i++] = rctx->iv[j];
232 cpuvaddr[i++] = rctx
202 tegra_aes_prep_cmd(struct tegra_aes_ctx *ctx, struct tegra_aes_reqctx *rctx) argument
261 struct tegra_aes_reqctx *rctx = skcipher_request_ctx(req); local
427 struct tegra_aes_reqctx *rctx; local
557 tegra_gmac_prep_cmd(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
597 tegra_gcm_crypt_prep_cmd(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
657 tegra_gcm_prep_final_cmd(struct tegra_se *se, u32 *cpuvaddr, struct tegra_aead_reqctx *rctx) argument
708 tegra_gcm_do_gmac(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
725 tegra_gcm_do_crypt(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
750 tegra_gcm_do_final(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
776 tegra_gcm_do_verify(struct tegra_se *se, struct tegra_aead_reqctx *rctx) argument
804 tegra_cbcmac_prep_cmd(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
839 tegra_ctr_prep_cmd(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
881 tegra_ccm_do_cbcmac(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
915 tegra_ccm_format_nonce(struct tegra_aead_reqctx *rctx, u8 *nonce) argument
971 tegra_ccm_format_blocks(struct tegra_aead_reqctx *rctx) argument
1000 tegra_ccm_mac_result(struct tegra_se *se, struct tegra_aead_reqctx *rctx) argument
1023 tegra_ccm_ctr_result(struct tegra_se *se, struct tegra_aead_reqctx *rctx) argument
1039 tegra_ccm_compute_auth(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
1067 tegra_ccm_do_ctr(struct tegra_aead_ctx *ctx, struct tegra_aead_reqctx *rctx) argument
1108 tegra_ccm_crypt_init(struct aead_request *req, struct tegra_se *se, struct tegra_aead_reqctx *rctx) argument
1142 struct tegra_aead_reqctx *rctx = aead_request_ctx(req); local
1215 struct tegra_aead_reqctx *rctx = aead_request_ctx(req); local
1360 struct tegra_aead_reqctx *rctx = aead_request_ctx(req); local
1390 tegra_cmac_prep_cmd(struct tegra_cmac_ctx *ctx, struct tegra_cmac_reqctx *rctx) argument
1444 tegra_cmac_copy_result(struct tegra_se *se, struct tegra_cmac_reqctx *rctx) argument
1452 tegra_cmac_paste_result(struct tegra_se *se, struct tegra_cmac_reqctx *rctx) argument
1463 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1539 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1580 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1662 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1721 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1732 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1743 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1754 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1764 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
1773 struct tegra_cmac_reqctx *rctx = ahash_request_ctx(req); local
[all...]
tegra-se-hash.c
114 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
118 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
119 rctx->fallback_req.base.flags = req->base.flags &
122 return crypto_ahash_init(&rctx->fallback_req);
127 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
131 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
132 rctx->fallback_req.base.flags = req->base.flags &
134 rctx->fallback_req.nbytes = req->nbytes;
135 rctx->fallback_req.src = req->src;
137 return crypto_ahash_update(&rctx
142 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
156 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
173 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
190 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
203 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
214 tegra_sha_prep_cmd(struct tegra_se *se, u32 *cpuvaddr, struct tegra_sha_reqctx *rctx) argument
282 tegra_sha_copy_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx) argument
290 tegra_sha_paste_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx) argument
302 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
375 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
411 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
502 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
573 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
587 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
601 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
615 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
630 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
644 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); local
[all...]
/linux-master/drivers/crypto/starfive/
jh7110-rsa.c
76 struct starfive_cryp_request_ctx *rctx = ctx->rctx; local
77 int count = rctx->total / sizeof(u32) - 1;
83 rctx->csr.pka.v = 0;
85 writel(rctx->csr.pka.v, cryp->base + STARFIVE_PKA_CACR_OFFSET);
91 rctx->csr.pka.v = 0;
92 rctx->csr.pka.cln_done = 1;
93 rctx->csr.pka.opsize = opsize;
94 rctx->csr.pka.exposize = opsize;
95 rctx
173 struct starfive_cryp_request_ctx *rctx = ctx->rctx; local
252 struct starfive_cryp_request_ctx *rctx = ctx->rctx; local
288 struct starfive_cryp_request_ctx *rctx = akcipher_request_ctx(req); local
320 struct starfive_cryp_request_ctx *rctx = akcipher_request_ctx(req); local
[all...]
/linux-master/drivers/crypto/cavium/nitrox/
nitrox_aead.c
151 static int nitrox_set_creq(struct nitrox_aead_rctx *rctx) argument
153 struct se_crypto_request *creq = &rctx->nkreq.creq;
157 creq->flags = rctx->flags;
158 creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
163 creq->ctrl.s.arg = rctx->ctrl_arg;
165 creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
166 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
167 creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx
193 struct nitrox_aead_rctx *rctx = aead_request_ctx(areq); local
217 struct nitrox_aead_rctx *rctx = aead_request_ctx(areq); local
251 struct nitrox_aead_rctx *rctx = aead_request_ctx(areq); local
395 struct nitrox_rfc4106_rctx *rctx = aead_request_ctx_dma(areq); local
427 struct nitrox_rfc4106_rctx *rctx = aead_request_ctx_dma(areq); local
444 struct nitrox_rfc4106_rctx *rctx = aead_request_ctx_dma(areq); local
476 struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq); local
[all...]
/linux-master/drivers/crypto/rockchip/
rk3288_crypto_ahash.c
46 struct rk_ahash_rctx *rctx = ahash_request_ctx(areq); local
54 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
55 rctx->fallback_req.base.flags = areq->base.flags &
58 rctx->fallback_req.nbytes = areq->nbytes;
59 rctx->fallback_req.src = areq->src;
60 rctx->fallback_req.result = areq->result;
62 return crypto_ahash_digest(&rctx->fallback_req);
90 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
110 CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
122 struct rk_ahash_rctx *rctx local
135 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
150 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
164 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
181 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
194 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
207 struct rk_ahash_rctx *rctx = ahash_request_ctx(req); local
236 struct rk_ahash_rctx *rctx = ahash_request_ctx(areq); local
252 struct rk_ahash_rctx *rctx = ahash_request_ctx(areq); local
262 struct rk_ahash_rctx *rctx = ahash_request_ctx(areq); local
[all...]
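
rk3288_crypto_ahash.c above (like the tegra-se-hash.c and sun8i-ss-hash.c entries) shows the ahash flavour of the fallback idiom: requests the hardware cannot take are delegated wholesale to a software tfm through the fallback request embedded in the request context, with only CRYPTO_TFM_REQ_MAY_SLEEP propagated from the caller's flags. A sketch under hypothetical demo_* names; the ahash_* and crypto_ahash_* calls are the kernel's, and the tfm's request size must include the fallback's reqsize.

#include <crypto/hash.h>
#include <crypto/internal/hash.h>

struct demo_hash_reqctx {
        struct ahash_request fallback_req;      /* must be last */
};

/* Delegate a one-shot digest to the software fallback transform. */
static int demo_hash_digest_fallback(struct ahash_request *areq,
                                     struct crypto_ahash *fallback_tfm)
{
        struct demo_hash_reqctx *rctx = ahash_request_ctx(areq);

        ahash_request_set_tfm(&rctx->fallback_req, fallback_tfm);
        rctx->fallback_req.base.flags = areq->base.flags &
                                        CRYPTO_TFM_REQ_MAY_SLEEP;
        ahash_request_set_crypt(&rctx->fallback_req, areq->src, areq->result,
                                areq->nbytes);

        return crypto_ahash_digest(&rctx->fallback_req);
}
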
rk3288_crypto_skcipher.c
72 struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq); local
79 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
80 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
82 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
84 if (rctx->mode & RK_CRYPTO_DEC)
85 err = crypto_skcipher_decrypt(&rctx->fallback_req);
87 err = crypto_skcipher_encrypt(&rctx->fallback_req);
93 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
103 rctx->dev = rkc;
157 struct rk_cipher_rctx *rctx local
165 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
173 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
181 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
189 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
197 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
205 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
213 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
221 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
229 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
237 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
245 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
256 struct rk_cipher_rctx *rctx = skcipher_request_ctx(req); local
303 struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq); local
[all...]
/linux-master/drivers/crypto/bcm/
cipher.c
117 * @rctx: crypto request context
134 struct iproc_reqctx_s *rctx,
140 struct iproc_ctx_s *ctx = rctx->ctx;
144 rctx->gfp);
151 sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
156 sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak,
160 datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
161 rctx->dst_nents, chunksize);
169 sg_set_buf(sg++, rctx
133 spu_skcipher_rx_sg_create(struct brcm_message *mssg, struct iproc_reqctx_s *rctx, u8 rx_frag_num, unsigned int chunksize, u32 stat_pad_len) argument
197 spu_skcipher_tx_sg_create(struct brcm_message *mssg, struct iproc_reqctx_s *rctx, u8 tx_frag_num, unsigned int chunksize, u32 pad_len) argument
299 handle_skcipher_req(struct iproc_reqctx_s *rctx) argument
470 handle_skcipher_resp(struct iproc_reqctx_s *rctx) argument
525 spu_ahash_rx_sg_create(struct brcm_message *mssg, struct iproc_reqctx_s *rctx, u8 rx_frag_num, unsigned int digestsize, u32 stat_pad_len) argument
577 spu_ahash_tx_sg_create(struct brcm_message *mssg, struct iproc_reqctx_s *rctx, u8 tx_frag_num, u32 spu_hdr_len, unsigned int hash_carry_len, unsigned int new_data_len, u32 pad_len) argument
652 handle_ahash_req(struct iproc_reqctx_s *rctx) argument
943 ahash_req_done(struct iproc_reqctx_s *rctx) argument
993 handle_ahash_resp(struct iproc_reqctx_s *rctx) argument
1041 spu_aead_rx_sg_create(struct brcm_message *mssg, struct aead_request *req, struct iproc_reqctx_s *rctx, u8 rx_frag_num, unsigned int assoc_len, u32 ret_iv_len, unsigned int resp_len, unsigned int digestsize, u32 stat_pad_len) argument
1161 spu_aead_tx_sg_create(struct brcm_message *mssg, struct iproc_reqctx_s *rctx, u8 tx_frag_num, u32 spu_hdr_len, struct scatterlist *assoc, unsigned int assoc_len, int assoc_nents, unsigned int aead_iv_len, unsigned int chunksize, u32 aad_pad_len, u32 pad_len, bool incl_icv) argument
1258 handle_aead_req(struct iproc_reqctx_s *rctx) argument
1528 handle_aead_resp(struct iproc_reqctx_s *rctx) argument
1588 spu_chunk_cleanup(struct iproc_reqctx_s *rctx) argument
1606 finish_req(struct iproc_reqctx_s *rctx, int err) argument
1628 struct iproc_reqctx_s *rctx; local
1714 struct iproc_reqctx_s *rctx = skcipher_request_ctx(req); local
1904 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
1961 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2077 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2135 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2172 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2287 const struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2302 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2418 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2468 struct iproc_reqctx_s *rctx = ahash_request_ctx(req); local
2506 struct iproc_reqctx_s *rctx = aead_request_ctx(req); local
2577 struct iproc_reqctx_s *rctx = aead_request_ctx(req); local
2600 struct iproc_reqctx_s *rctx = aead_request_ctx(req); local
[all...]
/linux-master/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-hash.c
146 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
150 memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx));
152 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
153 rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
155 return crypto_ahash_init(&rctx->fallback_req);
160 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
164 ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
165 rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
167 return crypto_ahash_export(&rctx->fallback_req, out);
172 struct sun8i_ss_hash_reqctx *rctx local
184 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
210 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
225 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
254 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
281 sun8i_ss_run_hash_task(struct sun8i_ss_dev *ss, struct sun8i_ss_hash_reqctx *rctx, const char *name) argument
392 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
478 struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq); local
[all...]
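
sun8i-ss-hash.c also routes export of partial hash state through the same fallback request, so software can serialize state the hardware path never sees. A sketch of the export side with hypothetical demo_* names; crypto_ahash_export() and the request-setup calls are the kernel's.

#include <crypto/hash.h>
#include <crypto/internal/hash.h>

struct demo_ss_hash_reqctx {
        struct ahash_request fallback_req;      /* must be last */
};

static int demo_hash_export(struct ahash_request *areq,
                            struct crypto_ahash *fallback_tfm, void *out)
{
        struct demo_ss_hash_reqctx *rctx = ahash_request_ctx(areq);

        ahash_request_set_tfm(&rctx->fallback_req, fallback_tfm);
        rctx->fallback_req.base.flags = areq->base.flags &
                                        CRYPTO_TFM_REQ_MAY_SLEEP;

        return crypto_ahash_export(&rctx->fallback_req, out);
}
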
/linux-master/crypto/
chacha20poly1305.c
74 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
76 rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
97 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
98 u8 tag[sizeof(rctx->tag)];
101 req->assoclen + rctx->cryptlen,
103 if (crypto_memneq(tag, rctx->tag, sizeof(tag)))
110 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
112 scatterwalk_map_and_copy(rctx->tag, req->dst,
113 req->assoclen + rctx->cryptlen,
114 sizeof(rctx
126 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
156 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
173 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
202 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
231 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
261 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
290 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
314 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
340 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
364 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
402 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
432 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
454 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); local
[all...]
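
The decrypt path in chacha20poly1305.c reads the transmitted tag from the source scatterlist and compares it, constant-time, against the tag it just computed. A self-contained sketch of that check; the function and parameter names are illustrative, the helpers are the kernel's.

#include <linux/errno.h>
#include <crypto/algapi.h>      /* crypto_memneq() */
#include <crypto/poly1305.h>    /* POLY1305_DIGEST_SIZE */
#include <crypto/scatterwalk.h>

static int demo_poly_verify_tag(struct scatterlist *src,
                                unsigned int assoclen, unsigned int cryptlen,
                                const u8 *computed_tag)
{
        u8 tag[POLY1305_DIGEST_SIZE];

        /* The tag sits right after the associated data and ciphertext. */
        scatterwalk_map_and_copy(tag, src, assoclen + cryptlen,
                                 sizeof(tag), 0);
        if (crypto_memneq(tag, computed_tag, sizeof(tag)))
                return -EBADMSG;

        return 0;
}
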
xts.c
85 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
90 le128 t = rctx->t;
94 req = &rctx->subreq;
113 rctx->t = t;
118 gf128mul_x_ble(&rctx->t, &t);
149 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
151 scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
152 le128_xor(&b, &rctx->t, &b);
153 scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
165 struct xts_request_ctx *rctx local
204 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
224 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
244 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
263 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
280 struct xts_request_ctx *rctx = skcipher_request_ctx(req); local
[all...]
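
The xts.c hits revolve around the per-block tweak: xor the tweak into a block, then multiply the tweak by x in GF(2^128) to get the next one (the tail handling above is the same xor applied to the stolen final block). A sketch of one such step with an illustrative helper name; le128_xor(), gf128mul_x_ble() and scatterwalk_map_and_copy() are the kernel's.

#include <crypto/gf128mul.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>         /* XTS_BLOCK_SIZE */

/* Xor the current tweak into one 16-byte block held in a scatterlist,
 * then advance the tweak for the next block. */
static void demo_xts_xor_tweak_one(struct scatterlist *sg, le128 *t)
{
        le128 b;

        scatterwalk_map_and_copy(&b, sg, 0, XTS_BLOCK_SIZE, 0);
        le128_xor(&b, t, &b);
        scatterwalk_map_and_copy(&b, sg, 0, XTS_BLOCK_SIZE, 1);

        gf128mul_x_ble(t, t);           /* t = t * x for the next block */
}
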
/linux-master/drivers/crypto/
sahara.c
543 struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req); local
547 if (rctx->mode & FLAGS_ENCRYPT) {
551 memcpy(req->iv, rctx->iv_out, ivsize);
560 struct sahara_aes_reqctx *rctx; local
574 rctx = skcipher_request_ctx(req);
576 rctx->mode &= FLAGS_MODE_MASK;
577 dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
586 rctx->iv_out, ivsize,
646 struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req); local
650 skcipher_request_set_tfm(&rctx
666 struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req); local
733 sahara_sha_init_hdr(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx) argument
756 sahara_sha_hw_links_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, int start) argument
799 sahara_sha_hw_data_descriptor_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, struct ahash_request *req, int index) argument
852 sahara_sha_hw_context_descriptor_create(struct sahara_dev *dev, struct sahara_sha_reqctx *rctx, struct ahash_request *req, int index) argument
874 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); local
932 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); local
1012 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); local
1026 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); local
1074 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); local
1083 struct sahara_sha_reqctx *rctx = ahash_request_ctx(req); local
[all...]
/linux-master/drivers/crypto/gemini/
sl3516-ce-cipher.c
107 struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq); local
115 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
116 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
118 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
120 if (rctx->op_dir == CE_DECRYPTION)
121 err = crypto_skcipher_decrypt(&rctx->fallback_req);
123 err = crypto_skcipher_encrypt(&rctx->fallback_req);
132 struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq); local
148 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
185 rctx
282 struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq); local
300 struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq); local
[all...]

Completed in 486 milliseconds
