// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

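/*
 * The extra 'finalize' word is read by the asm code via the
 * sha256_ce_offsetof_finalize constant below; when set, the asm routine
 * appends the final padding block itself instead of bouncing back to the
 * C glue for it.
 */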
struct sha256_ce_state {
	struct sha256_state	sst;
	u32			finalize;
};

extern const u32 sha256_ce_offsetof_count;
extern const u32 sha256_ce_offsetof_finalize;

asmlinkage int __sha256_ce_transform(struct sha256_ce_state *sst, u8 const *src,
				     int blocks);

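/*
 * __sha256_ce_transform() may return early, reporting how many of the
 * requested blocks it did not process, so that the task can be
 * rescheduled in between; this wrapper simply loops until all blocks
 * have been consumed, keeping each NEON section bounded.
 */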
static void sha256_ce_transform(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	while (blocks) {
		int rem;

		kernel_neon_begin();
		rem = __sha256_ce_transform(container_of(sst,
							 struct sha256_ce_state,
							 sst), src, blocks);
		kernel_neon_end();
		src += (blocks - rem) * SHA256_BLOCK_SIZE;
		blocks = rem;
	}
}

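/*
 * These constants let the asm code locate the count and finalize fields
 * without having to share the C structure layout.
 */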
const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
					      sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
						 finalize);

asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);

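/*
 * Scalar fallback for when kernel-mode NEON is not usable:
 * sha256_block_data_order() is the generic arm64 assembly implementation
 * that stays off the SIMD register file.
 */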
static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
				   int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
					     sha256_arm64_transform);

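	/*
	 * A plain update can never be the final call, so make sure the
	 * asm code does not emit the padding block.
	 */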
	sctx->finalize = 0;
	sha256_base_do_update(desc, data, len, sha256_ce_transform);

	return 0;
}

static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
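	/*
	 * The asm code may take over finalization only when this call
	 * covers the whole message: nothing hashed yet, a nonzero length,
	 * and a whole number of blocks.
	 */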
	bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
					      sha256_arm64_transform);
		sha256_base_do_finalize(desc, sha256_arm64_transform);
		return sha256_base_finish(desc, out);
	}

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 */
	sctx->finalize = finalize;

	sha256_base_do_update(desc, data, len, sha256_ce_transform);
	if (!finalize)
		sha256_base_do_finalize(desc, sha256_ce_transform);
	return sha256_base_finish(desc, out);
}

static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable()) {
		sha256_base_do_finalize(desc, sha256_arm64_transform);
		return sha256_base_finish(desc, out);
	}

	sctx->finalize = 0;
	sha256_base_do_finalize(desc, sha256_ce_transform);
	return sha256_base_finish(desc, out);
}

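/*
 * One-shot entry point: init + finup in a single call, so a block
 * aligned message can be hashed and finalized entirely in the asm code.
 */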
static int sha256_ce_digest(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	sha256_base_init(desc);
	return sha256_ce_finup(desc, data, len, out);
}

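/*
 * Only the generic struct sha256_state is exported, keeping saved state
 * interchangeable with other sha256 drivers; the private finalize flag
 * is not exported and gets reset on import.
 */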
static int sha256_ce_export(struct shash_desc *desc, void *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(out, &sctx->sst, sizeof(struct sha256_state));
	return 0;
}

static int sha256_ce_import(struct shash_desc *desc, const void *in)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(&sctx->sst, in, sizeof(struct sha256_state));
	sctx->finalize = 0;
	return 0;
}

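/*
 * Priority 200 ranks these drivers above the generic C implementation
 * (priority 100), so the Crypto Extensions code is preferred whenever
 * both are registered.
 */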
static struct shash_alg algs[] = { {
	.init			= sha224_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.digest			= sha256_ce_digest,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };

static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);