1// SPDX-License-Identifier: GPL-2.0
2/*
3 * s390 ChaCha stream cipher.
4 *
5 * Copyright IBM Corp. 2021
6 */
7
8#define KMSG_COMPONENT "chacha_s390"
9#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
10
11#include <crypto/internal/chacha.h>
12#include <crypto/internal/skcipher.h>
13#include <crypto/algapi.h>
14#include <linux/cpufeature.h>
15#include <linux/kernel.h>
16#include <linux/module.h>
17#include <linux/sizes.h>
18#include <asm/fpu.h>
19#include "chacha-s390.h"
20
/*
 * Run the vector-assisted ChaCha20 core (chacha20_vx) over @nbytes of
 * @src into @dst, bracketed by kernel FPU save/restore so the vector
 * registers may be clobbered in kernel context.
 *
 * @state is unused here; callers pass @key and @counter as pointers into
 * the same state array (&state[4] / &state[12]), keeping the signature
 * parallel to chacha_crypt_generic().
 *
 * On return, *counter has been advanced by the number of ChaCha blocks
 * consumed (nbytes rounded up to a whole block).  NOTE(review): this
 * assumes chacha20_vx itself does not write back the caller's counter —
 * confirm against the assembly in chacha-s390.h.
 */
static void chacha20_crypt_s390(u32 *state, u8 *dst, const u8 *src,
				unsigned int nbytes, const u32 *key,
				u32 *counter)
{
	DECLARE_KERNEL_FPU_ONSTACK32(vxstate);

	/* Save vector register state before the asm touches VXRs. */
	kernel_fpu_begin(&vxstate, KERNEL_VXR);
	chacha20_vx(dst, src, nbytes, key, counter);
	kernel_fpu_end(&vxstate, KERNEL_VXR);

	/* Account for the blocks just produced (partial block counts as one). */
	*counter += round_up(nbytes, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
}
33
/*
 * skcipher .encrypt/.decrypt handler.  ChaCha is its own inverse, so a
 * single routine serves both directions: walk the request's
 * scatterlists and XOR the keystream over each contiguous span.
 */
static int chacha20_s390(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	/* 16-byte aligned, presumably for vector loads in chacha20_vx — TODO confirm */
	u32 state[CHACHA_STATE_WORDS] __aligned(16);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int rc;

	/* rc also covers the zero-iteration case (walk.nbytes == 0 on error). */
	rc = skcipher_walk_virt(&walk, req, false);
	chacha_init_generic(state, ctx->key, req->iv);

	while (walk.nbytes > 0) {
		nbytes = walk.nbytes;
		/* Keep whole stride units except on the final chunk. */
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		if (nbytes <= CHACHA_BLOCK_SIZE) {
			/*
			 * The s390 vector code cannot handle one block or
			 * less (see chacha_crypt_arch); use the generic
			 * implementation, which also updates state[12].
			 */
			chacha_crypt_generic(state, walk.dst.virt.addr,
					     walk.src.virt.addr, nbytes,
					     ctx->nrounds);
		} else {
			/* key words live at state[4..11], counter at state[12]. */
			chacha20_crypt_s390(state, walk.dst.virt.addr,
					    walk.src.virt.addr, nbytes,
					    &state[4], &state[12]);
		}
		/* Report the unprocessed remainder back to the walker. */
		rc = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
	return rc;
}
64
/*
 * HChaCha block for the ChaCha library interface (used by XChaCha key
 * derivation).  No s390-specific implementation exists yet, so this is
 * a straight pass-through to the generic C code.
 */
void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
	/* TODO: implement hchacha_block_arch() in assembly */
	hchacha_block_generic(state, stream, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);
71
/*
 * State initialization for the ChaCha library interface: nothing
 * arch-specific to do, delegate to the generic initializer.
 */
void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
{
	chacha_init_generic(state, key, iv);
}
EXPORT_SYMBOL(chacha_init_arch);
77
78void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
79		       unsigned int bytes, int nrounds)
80{
81	/* s390 chacha20 implementation has 20 rounds hard-coded,
82	 * it cannot handle a block of data or less, but otherwise
83	 * it can handle data of arbitrary size
84	 */
85	if (bytes <= CHACHA_BLOCK_SIZE || nrounds != 20 || !cpu_has_vx())
86		chacha_crypt_generic(state, dst, src, bytes, nrounds);
87	else
88		chacha20_crypt_s390(state, dst, src, bytes,
89				    &state[4], &state[12]);
90}
91EXPORT_SYMBOL(chacha_crypt_arch);
92
/* skcipher algorithm table registered with the crypto API. */
static struct skcipher_alg chacha_algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-s390",
		/* outrank the generic chacha20 implementation */
		.base.cra_priority	= 900,
		/* blocksize 1: processed as a stream cipher */
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		/* ChaCha is its own inverse: one handler for both directions */
		.encrypt		= chacha20_s390,
		.decrypt		= chacha20_s390,
	}
};
111
112static int __init chacha_mod_init(void)
113{
114	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
115		crypto_register_skciphers(chacha_algs, ARRAY_SIZE(chacha_algs)) : 0;
116}
117
118static void __exit chacha_mod_fini(void)
119{
120	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER))
121		crypto_unregister_skciphers(chacha_algs, ARRAY_SIZE(chacha_algs));
122}
123
/*
 * Bind module init to the VXRS CPU facility so the module is
 * auto-loaded only on machines with vector support.
 */
module_cpu_feature_match(S390_CPU_FEATURE_VXRS, chacha_mod_init);
module_exit(chacha_mod_fini);

MODULE_DESCRIPTION("ChaCha20 stream cipher");
MODULE_LICENSE("GPL v2");

/* allow loading by generic algorithm name */
MODULE_ALIAS_CRYPTO("chacha20");