// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/string.h>
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC			BIT(0)

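/*
 * Decide whether a request can run on the hardware or must use the
 * software fallback.  The DMA path needs every scatterlist entry to be
 * 32-bit aligned, to cover a multiple of the cipher block size, and the
 * source and destination lists to be laid out identically; zero-length
 * requests are also punted.  Each reason is counted in the
 * per-algorithm debug statistics.
 */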
static int rk_cipher_need_fallback(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);
	struct scatterlist *sgs, *sgd;
	unsigned int stodo, dtodo, len;
	unsigned int bs = crypto_skcipher_blocksize(tfm);

	if (!req->cryptlen)
		return true;

	len = req->cryptlen;
	sgs = req->src;
	sgd = req->dst;
	while (sgs && sgd) {
		if (!IS_ALIGNED(sgs->offset, sizeof(u32))) {
			algt->stat_fb_align++;
			return true;
		}
		if (!IS_ALIGNED(sgd->offset, sizeof(u32))) {
			algt->stat_fb_align++;
			return true;
		}
		stodo = min(len, sgs->length);
		if (stodo % bs) {
			algt->stat_fb_len++;
			return true;
		}
		dtodo = min(len, sgd->length);
		if (dtodo % bs) {
			algt->stat_fb_len++;
			return true;
		}
		if (stodo != dtodo) {
			algt->stat_fb_sgdiff++;
			return true;
		}
		len -= stodo;
		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}
	return false;
}

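/* Process the whole request with the software fallback transform. */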
static int rk_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct rk_cipher_ctx *op = crypto_skcipher_ctx(tfm);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);
	int err;

	algt->stat_fb++;

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->mode & RK_CRYPTO_DEC)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

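/*
 * Either hand the request to the software fallback or queue it on the
 * crypto engine of the unit returned by get_rk_crypto().
 */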
static int rk_cipher_handle_req(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_crypto_info *rkc;
	struct crypto_engine *engine;

	if (rk_cipher_need_fallback(req))
		return rk_cipher_fallback(req);

	rkc = get_rk_crypto();

	engine = rkc->engine;
	rctx->dev = rkc;

	return crypto_transfer_skcipher_request_to_engine(engine, req);
}

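/*
 * The setkey handlers keep a copy of the key for rk_cipher_hw_init()
 * to program into the engine, and mirror it into the fallback tfm so
 * that either path can serve a request.
 */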
static int rk_aes_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int rk_des_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int rk_tdes_setkey(struct crypto_skcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

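/*
 * Request entry points: each records the hardware mode bits for the
 * selected algorithm and direction in the request context, then
 * dispatches through rk_cipher_handle_req().
 */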
static int rk_aes_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_cipher_handle_req(req);
}

static int rk_aes_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_aes_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_cipher_handle_req(req);
}

static int rk_aes_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = 0;
	return rk_cipher_handle_req(req);
}

static int rk_des_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_cipher_handle_req(req);
}

static int rk_des_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		    RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

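/*
 * Program the engine for one request: select the TDES or AES unit based
 * on the block size, load the key, enable FIFO mode and key/IV byte
 * swapping, and unmask the DMA done/error interrupts.
 */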
static void rk_cipher_hw_init(struct rk_crypto_info *dev, struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	u32 block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);

	if (block == DES_BLOCK_SIZE) {
		rctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, rctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_KEY1_0, ctx->key, ctx->keylen);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		rctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			rctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			rctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, rctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_KEY_0, ctx->key, ctx->keylen);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

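/*
 * Point the block-cipher DMA at one source/destination segment and
 * start it.  @todo is in 32-bit words.  Pairing RK_CRYPTO_BLOCK_START
 * with the same bit shifted into the upper half-word appears to follow
 * the usual Rockchip write-enable-mask register scheme.
 */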
static void crypto_dma_start(struct rk_crypto_info *dev,
			     struct scatterlist *sgs,
			     struct scatterlist *sgd, unsigned int todo)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd));
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

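/*
 * Engine callback that runs one skcipher request.  The request is
 * processed one scatterlist segment at a time; since the engine is
 * reprogrammed for every segment, CBC IV chaining across segments is
 * handled in software here.
 */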
static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sgs, *sgd;
	int err = 0;
	int ivsize = crypto_skcipher_ivsize(tfm);
	int offset;
	u8 iv[AES_BLOCK_SIZE];
	u8 biv[AES_BLOCK_SIZE];
	u8 *ivtouse = areq->iv;
	unsigned int len = areq->cryptlen;
	unsigned int todo;
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);
	struct rk_crypto_info *rkc = rctx->dev;

	err = pm_runtime_resume_and_get(rkc->dev);
	if (err)
		return err;

	algt->stat_req++;
	rkc->nreq++;

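	/*
	 * CBC decryption must return the last ciphertext block as the
	 * new IV; save it from the source now, before a possibly
	 * in-place operation overwrites it.
	 */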
	if (areq->iv && ivsize > 0) {
		if (rctx->mode & RK_CRYPTO_DEC) {
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
						 offset, ivsize, 0);
		}
	}

	sgs = areq->src;
	sgd = areq->dst;

	while (sgs && sgd && len) {
		if (!sgs->length) {
			sgs = sg_next(sgs);
			sgd = sg_next(sgd);
			continue;
		}
		if (rctx->mode & RK_CRYPTO_DEC) {
			/*
			 * Back up the last block of this source segment;
			 * it becomes the IV for the next segment.
			 */
			offset = sgs->length - ivsize;
			scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0);
		}
		if (sgs == sgd) {
			err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_iv;
			}
		} else {
			err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_iv;
			}
			err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_sgs;
			}
		}
		err = 0;
		rk_cipher_hw_init(rkc, areq);
		if (ivsize) {
			if (ivsize == DES_BLOCK_SIZE)
				memcpy_toio(rkc->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
			else
				memcpy_toio(rkc->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
		}
		reinit_completion(&rkc->complete);
		rkc->status = 0;

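		/* The DMA length register counts 32-bit words, hence todo / 4. */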
		todo = min(sg_dma_len(sgs), len);
		len -= todo;
		crypto_dma_start(rkc, sgs, sgd, todo / 4);
		wait_for_completion_interruptible_timeout(&rkc->complete,
							  msecs_to_jiffies(2000));
		if (!rkc->status) {
			dev_err(rkc->dev, "DMA timeout\n");
			err = -EFAULT;
			goto theend;
		}
		if (sgs == sgd) {
			dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
		} else {
			dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
			dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
		}
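		/*
		 * Chain the IV into the next segment: for decryption use
		 * the source block saved in biv above, for encryption
		 * read the last ciphertext block just produced in the
		 * destination.
		 */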
		if (rctx->mode & RK_CRYPTO_DEC) {
			memcpy(iv, biv, ivsize);
			ivtouse = iv;
		} else {
			offset = sgd->length - ivsize;
			scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0);
			ivtouse = iv;
		}
		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}

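	/* Hand the final IV back to the caller, as the skcipher API expects. */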
	if (areq->iv && ivsize > 0) {
		offset = areq->cryptlen - ivsize;
		if (rctx->mode & RK_CRYPTO_DEC) {
			memcpy(areq->iv, rctx->backup_iv, ivsize);
			memzero_explicit(rctx->backup_iv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend:
	pm_runtime_put_autosuspend(rkc->dev);

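	/*
	 * Finalize with BHs disabled: the completion callback expects to
	 * run in a softirq-like context.
	 */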
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, areq, err);
	local_bh_enable();
	return 0;

theend_sgs:
	if (sgs == sgd) {
		dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
		dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
	}
theend_iv:
	return err;
}

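/*
 * Allocate the software fallback transform and size the request context
 * so the fallback request can live inside ours.
 */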
static int rk_cipher_tfm_init(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(algt->dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct rk_cipher_rctx) +
				    crypto_skcipher_reqsize(ctx->fallback_tfm));

	return 0;
}

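/* Zeroize the key copy and release the fallback transform. */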
static void rk_cipher_tfm_exit(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	memzero_explicit(ctx->key, ctx->keylen);
	crypto_free_skcipher(ctx->fallback_tfm);
}

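/*
 * Algorithm templates below are registered with the crypto API by the
 * core driver (rk3288_crypto.c).
 */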
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x0f,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= rk_aes_setkey,
		.encrypt		= rk_aes_ecb_encrypt,
		.decrypt		= rk_aes_ecb_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x0f,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= rk_aes_setkey,
		.encrypt		= rk_aes_cbc_encrypt,
		.decrypt		= rk_aes_cbc_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};

struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "ecb(des)",
		.base.cra_driver_name	= "ecb-des-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.setkey			= rk_des_setkey,
		.encrypt		= rk_des_ecb_encrypt,
		.decrypt		= rk_des_ecb_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};

struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "cbc(des)",
		.base.cra_driver_name	= "cbc-des-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= rk_des_setkey,
		.encrypt		= rk_des_cbc_encrypt,
		.decrypt		= rk_des_cbc_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "ecb-des3-ede-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= rk_tdes_setkey,
		.encrypt		= rk_des3_ede_ecb_encrypt,
		.decrypt		= rk_des3_ede_ecb_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "cbc-des3-ede-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= rk_tdes_setkey,
		.encrypt		= rk_des3_ede_cbc_encrypt,
		.decrypt		= rk_des3_ede_cbc_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};