// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keys in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arch/arm/sunxi.rst
 */

#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"

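/*
 * Check whether the request can be handled by the hardware or must go to
 * the software fallback. As enforced below, the SS engine only handles
 * lengths that are a non-zero multiple of the 16-byte block size, at most
 * 8 SG entries per direction, 16-byte aligned SG offsets, and source and
 * destination scatterlists with identical layouts.
 */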
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;
	unsigned int todo, len;

	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		algt->stat_fb_len++;
		return true;
	}

	if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
	    sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {
		algt->stat_fb_sgnum++;
		return true;
	}

	len = areq->cryptlen;
	sg = areq->src;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}
	len = areq->cryptlen;
	sg = areq->dst;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}
	/*
	 * The SS requires the same number of SG entries, with matching
	 * lengths, for source and destination.
	 */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}

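/*
 * Hand the request over to the software fallback transform allocated at
 * init time, preserving the original request's parameters and direction.
 */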
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG)) {
		struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
		struct sun8i_ss_alg_template *algt __maybe_unused;

		algt = container_of(alg, struct sun8i_ss_alg_template,
				    alg.skcipher.base);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
		algt->stat_fb++;
#endif
	}

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

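/*
 * Map one IV per SG entry for the hardware. For CBC decryption, the IV of
 * every chunk after the first is the last ciphertext block of the previous
 * chunk, so these blocks must be copied out of the source before a possibly
 * in-place operation overwrites them; the final ciphertext block is backed
 * up in sf->biv so it can be returned as the output IV.
 */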
static int sun8i_ss_setup_ivs(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sg = areq->src;
	unsigned int todo, offset;
	unsigned int len = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	int i = 0;
	dma_addr_t a;
	int err;

	rctx->ivlen = ivsize;
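	/*
	 * Back up the last ciphertext block now: an in-place decryption
	 * would overwrite it, and it must be returned as the output IV.
	 */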
	if (rctx->op_dir & SS_DECRYPTION) {
		offset = areq->cryptlen - ivsize;
		scatterwalk_map_and_copy(sf->biv, areq->src, offset,
					 ivsize, 0);
	}

	/* We need to copy all IVs from the source in case DMA is bidirectional */
	while (sg && len) {
		if (sg_dma_len(sg) == 0) {
			sg = sg_next(sg);
			continue;
		}
		if (i == 0)
			memcpy(sf->iv[0], areq->iv, ivsize);
		a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, a)) {
			memzero_explicit(sf->iv[i], ivsize);
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -EFAULT;
			goto dma_iv_error;
		}
		rctx->p_iv[i] = a;
		/* All the other IVs need to be set up only for decryption */
		if (rctx->op_dir == SS_ENCRYPTION)
			return 0;
		todo = min(len, sg_dma_len(sg));
		len -= todo;
		i++;
		if (i < MAX_SG) {
			offset = sg->length - ivsize;
			scatterwalk_map_and_copy(sf->iv[i], sg, offset, ivsize, 0);
		}
		rctx->niv = i;
		sg = sg_next(sg);
	}

	return 0;
dma_iv_error:
	i--;
	while (i >= 0) {
		dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
		memzero_explicit(sf->iv[i], ivsize);
		i--;
	}
	return err;
}

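/*
 * Prepare and run one cipher request on the hardware: map the key, the IVs
 * and both scatterlists for DMA, fill the per-flow task descriptors, run
 * the task, then unmap everything in reverse order.
 */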
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
	int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && ivsize > 0) {
		err = sun8i_ss_setup_ivs(areq);
		if (err)
			goto theend_key;
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
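		/* The task descriptor expects lengths in 32-bit words */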
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		for (i = 0; i < rctx->niv; i++) {
			dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
			memzero_explicit(sf->iv[i], ivsize);
		}

		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & SS_DECRYPTION) {
			memcpy(areq->iv, sf->biv, ivsize);
			memzero_explicit(sf->biv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

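/*
 * crypto_engine entry point: run the request synchronously on the hardware
 * and report the result. The completion is called with bottom halves
 * disabled, since completion callbacks may assume softirq context.
 */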
int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}

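/*
 * The two skcipher entry points below only record the direction and pick a
 * flow: anything the hardware cannot handle goes straight to the fallback,
 * everything else is queued on the selected engine.
 */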
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

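/*
 * Allocate the software fallback transform and size the request context so
 * it can carry the fallback's own request. A runtime PM reference is taken
 * to keep the device powered while the transform exists.
 */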
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(sktfm, sizeof(struct sun8i_cipher_req_ctx) +
				    crypto_skcipher_reqsize(op->fallback_tfm));

	memcpy(algt->fbname,
	       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)),
	       CRYPTO_MAX_ALG_NAME);

	err = pm_runtime_resume_and_get(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

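/* Release everything acquired in sun8i_ss_cipher_init() */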
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}

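/*
 * Accept only the AES key sizes the engine supports (128, 192 and 256 bits)
 * and mirror the key into the fallback transform so both paths stay in sync.
 */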
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
	case 192 / 8:
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

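/* Same as the AES path, but only a 192-bit (3 * DES_KEY_SIZE) key is valid */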
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}