// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for the AVX assembler implementation of the Cast5 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 */

#include <crypto/algapi.h>
#include <crypto/cast5.h>
#include <crypto/internal/simd.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"

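/* Number of 8-byte CAST5 blocks processed per call to the AVX routines. */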
#define CAST5_PARALLEL_BLOCKS 16

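/*
 * 16-way ECB and CBC helpers implemented in the accompanying AVX assembly.
 * Callers must have claimed the FPU (kernel_fpu_begin()) while these run.
 */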
asmlinkage void cast5_ecb_enc_16way(struct cast5_ctx *ctx, u8 *dst,
				    const u8 *src);
asmlinkage void cast5_ecb_dec_16way(struct cast5_ctx *ctx, u8 *dst,
				    const u8 *src);
asmlinkage void cast5_cbc_dec_16way(struct cast5_ctx *ctx, u8 *dst,
				    const u8 *src);

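/*
 * The generic cast5_setkey() takes a struct crypto_tfm; adapt it to the
 * skcipher interface used by the algorithms below.
 */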
static int cast5_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return cast5_setkey(&tfm->base, key, keylen);
}

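/*
 * ECB: the walk and FPU handling live in the ECB_* macros from
 * ecb_cbc_helpers.h.  Full groups of 16 blocks go through the AVX code;
 * any remainder is handled one block at a time by the generic C routines.
 */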
static int ecb_encrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS);
	ECB_BLOCK(CAST5_PARALLEL_BLOCKS, cast5_ecb_enc_16way);
	ECB_BLOCK(1, __cast5_encrypt);
	ECB_WALK_END();
}

static int ecb_decrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS);
	ECB_BLOCK(CAST5_PARALLEL_BLOCKS, cast5_ecb_dec_16way);
	ECB_BLOCK(1, __cast5_decrypt);
	ECB_WALK_END();
}

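/*
 * CBC encryption is inherently sequential (each block is chained to the
 * previous ciphertext block), so only the generic one-block routine is
 * used; the -1 fpu_blocks argument tells the walk helper to leave the FPU
 * alone.
 */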
static int cbc_encrypt(struct skcipher_request *req)
{
	CBC_WALK_START(req, CAST5_BLOCK_SIZE, -1);
	CBC_ENC_BLOCK(__cast5_encrypt);
	CBC_WALK_END();
}

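/*
 * CBC decryption is parallelizable (all ciphertext blocks are available up
 * front), so full groups of 16 blocks go through the AVX code and the tail
 * falls back to the generic one-block routine.
 */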
static int cbc_decrypt(struct skcipher_request *req)
{
	CBC_WALK_START(req, CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS);
	CBC_DEC_BLOCK(CAST5_PARALLEL_BLOCKS, cast5_cbc_dec_16way);
	CBC_DEC_BLOCK(1, __cast5_decrypt);
	CBC_WALK_END();
}

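/*
 * The "__" name prefix and CRYPTO_ALG_INTERNAL mark these algorithms as
 * internal only: they assume the FPU is already usable.
 * simd_register_skciphers_compat() wraps them in SIMD helpers exposed as
 * plain "ecb(cast5)" and "cbc(cast5)", which defer to cryptd when the FPU
 * cannot be used in the calling context.
 */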
static struct skcipher_alg cast5_algs[] = {
	{
		.base.cra_name		= "__ecb(cast5)",
		.base.cra_driver_name	= "__ecb-cast5-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST5_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast5_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST5_MIN_KEY_SIZE,
		.max_keysize		= CAST5_MAX_KEY_SIZE,
		.setkey			= cast5_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(cast5)",
		.base.cra_driver_name	= "__cbc-cast5-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST5_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast5_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST5_MIN_KEY_SIZE,
		.max_keysize		= CAST5_MAX_KEY_SIZE,
		.ivsize			= CAST5_BLOCK_SIZE,
		.setkey			= cast5_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}
};

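/* SIMD wrapper algorithms created by simd_register_skciphers_compat(). */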
static struct simd_skcipher_alg *cast5_simd_algs[ARRAY_SIZE(cast5_algs)];

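/*
 * The AVX code uses XMM and YMM state, so refuse to load unless the kernel
 * supports both xsave features.
 */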
static int __init cast5_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(cast5_algs,
					      ARRAY_SIZE(cast5_algs),
					      cast5_simd_algs);
}

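/* Unregister both the SIMD wrappers and the internal algorithms. */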
static void __exit cast5_exit(void)
{
	simd_unregister_skciphers(cast5_algs, ARRAY_SIZE(cast5_algs),
				  cast5_simd_algs);
}

module_init(cast5_init);
module_exit(cast5_exit);

MODULE_DESCRIPTION("Cast5 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast5");