/* SPDX-License-Identifier: GPL-2.0 */

#ifndef _CRYPTO_ECB_CBC_HELPER_H
#define _CRYPTO_ECB_CBC_HELPER_H

#include <crypto/internal/skcipher.h>
#include <asm/fpu/api.h>

/*
 * Mode helpers to instantiate parameterized skcipher ECB/CBC modes without
 * having to rely on indirect calls and retpolines.
 */
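
/*
 * Typical usage, as a sketch only: the MY_* constants and my_cipher_*()
 * helpers below are hypothetical stand-ins for a driver's own primitives.
 * A glue module first consumes as many parallel (SIMD) groups as possible,
 * then mops up the tail one block at a time:
 *
 *	static int ecb_encrypt(struct skcipher_request *req)
 *	{
 *		ECB_WALK_START(req, MY_BLOCK_SIZE, MY_PARALLEL_BLOCKS);
 *		ECB_BLOCK(MY_PARALLEL_BLOCKS, my_cipher_ecb_enc_nway);
 *		ECB_BLOCK(1, my_cipher_encrypt);
 *		ECB_WALK_END();
 *	}
 */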
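
/*
 * Start a virtual-address skcipher walk over @req. @bsize is the cipher
 * block size; @fpu_blocks is the minimum number of blocks worth entering
 * the FPU for, or -1 if the cipher implementation never needs the FPU.
 */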
#define ECB_WALK_START(req, bsize, fpu_blocks) do {			\
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));	\
	const int __fpu_blocks = (fpu_blocks);				\
	const int __bsize = (bsize);					\
	struct skcipher_walk walk;					\
	int err = skcipher_walk_virt(&walk, (req), false);		\
	while (walk.nbytes > 0) {					\
		unsigned int nbytes = walk.nbytes;			\
		bool do_fpu = __fpu_blocks != -1 &&			\
			      nbytes >= __fpu_blocks * __bsize;		\
		const u8 *src = walk.src.virt.addr;			\
		u8 *dst = walk.dst.virt.addr;				\
		u8 __maybe_unused buf[(bsize)];				\
		if (do_fpu) kernel_fpu_begin()
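
/* CBC uses the same walk setup as ECB. */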
#define CBC_WALK_START(req, bsize, fpu_blocks)				\
	ECB_WALK_START(req, bsize, fpu_blocks)
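
/* Advance the walk's source and destination pointers by @blocks blocks. */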
#define ECB_WALK_ADVANCE(blocks) do {					\
	dst += (blocks) * __bsize;					\
	src += (blocks) * __bsize;					\
	nbytes -= (blocks) * __bsize;					\
} while (0)
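
/*
 * Process chunks of @blocks blocks at a time by calling @func as
 * (*func)(ctx, dst, src). Exits the FPU context first when falling back
 * to a variant that handles fewer blocks than the FPU threshold.
 */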
#define ECB_BLOCK(blocks, func) do {					\
	const int __blocks = (blocks);					\
	if (do_fpu && __blocks < __fpu_blocks) {			\
		kernel_fpu_end();					\
		do_fpu = false;						\
	}								\
	while (nbytes >= __blocks * __bsize) {				\
		(func)(ctx, dst, src);					\
		ECB_WALK_ADVANCE(blocks);				\
	}								\
} while (0)
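
/*
 * CBC encryption is inherently sequential: XOR each plaintext block with
 * the previous ciphertext block (initially the IV), encrypt the result in
 * place, and finally save the last ciphertext block as the new IV.
 */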
#define CBC_ENC_BLOCK(func) do {					\
	const u8 *__iv = walk.iv;					\
	while (nbytes >= __bsize) {					\
		crypto_xor_cpy(dst, src, __iv, __bsize);		\
		(func)(ctx, dst, dst);					\
		__iv = dst;						\
		ECB_WALK_ADVANCE(1);					\
	}								\
	memcpy(walk.iv, __iv, __bsize);					\
} while (0)
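
/*
 * CBC decryption can be done @blocks blocks at a time: @func decrypts the
 * group (multi-block variants are expected to apply the intermediate
 * chaining XORs internally), the first block is then XORed with the
 * previous IV, and the last ciphertext block becomes the new IV. When
 * decrypting in place, that block is saved to the stack buffer first,
 * as @func would otherwise overwrite it.
 */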
#define CBC_DEC_BLOCK(blocks, func) do {				\
	const int __blocks = (blocks);					\
	if (do_fpu && __blocks < __fpu_blocks) {			\
		kernel_fpu_end();					\
		do_fpu = false;						\
	}								\
	while (nbytes >= __blocks * __bsize) {				\
		const u8 *__iv = src + ((blocks) - 1) * __bsize;	\
		if (dst == src)						\
			__iv = memcpy(buf, __iv, __bsize);		\
		(func)(ctx, dst, src);					\
		crypto_xor(dst, walk.iv, __bsize);			\
		memcpy(walk.iv, __iv, __bsize);				\
		ECB_WALK_ADVANCE(blocks);				\
	}								\
} while (0)
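
/*
 * Exit the FPU context if still active, complete this walk step, and
 * return the final error value once the walk is done.
 */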
#define ECB_WALK_END()							\
		if (do_fpu) kernel_fpu_end();				\
		err = skcipher_walk_done(&walk, nbytes);		\
	}								\
	return err;							\
} while (0)
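
/* CBC teardown is identical to ECB. */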
#define CBC_WALK_END() ECB_WALK_END()

#endif