// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * SHA-512 and SHA-384 Secure Hash Algorithm.
 *
 * Adapted for OCTEON by Aaro Koskinen <aaro.koskinen@iki.fi>.
 *
 * Based on crypto/sha512_generic.c, which is:
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2003 Kyle McMartin <kyle@debian.org>
 */

#include <linux/mm.h>
#include <crypto/sha2.h>
#include <crypto/sha512_base.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/module.h>
#include <asm/byteorder.h>
#include <asm/octeon/octeon.h>
#include <crypto/internal/hash.h>

#include "octeon-crypto.h"

/*
 * We pass everything as 64-bit. OCTEON can handle misaligned data.
 */

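/*
 * Load the eight 64-bit words of the intermediate hash state from the
 * software context into the OCTEON SHA-512 hash registers.
 */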
static void octeon_sha512_store_hash(struct sha512_state *sctx)
{
	write_octeon_64bit_hash_sha512(sctx->state[0], 0);
	write_octeon_64bit_hash_sha512(sctx->state[1], 1);
	write_octeon_64bit_hash_sha512(sctx->state[2], 2);
	write_octeon_64bit_hash_sha512(sctx->state[3], 3);
	write_octeon_64bit_hash_sha512(sctx->state[4], 4);
	write_octeon_64bit_hash_sha512(sctx->state[5], 5);
	write_octeon_64bit_hash_sha512(sctx->state[6], 6);
	write_octeon_64bit_hash_sha512(sctx->state[7], 7);
}

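/*
 * Read the updated intermediate hash state back from the OCTEON SHA-512
 * hash registers into the software context.
 */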
static void octeon_sha512_read_hash(struct sha512_state *sctx)
{
	sctx->state[0] = read_octeon_64bit_hash_sha512(0);
	sctx->state[1] = read_octeon_64bit_hash_sha512(1);
	sctx->state[2] = read_octeon_64bit_hash_sha512(2);
	sctx->state[3] = read_octeon_64bit_hash_sha512(3);
	sctx->state[4] = read_octeon_64bit_hash_sha512(4);
	sctx->state[5] = read_octeon_64bit_hash_sha512(5);
	sctx->state[6] = read_octeon_64bit_hash_sha512(6);
	sctx->state[7] = read_octeon_64bit_hash_sha512(7);
}

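/*
 * Feed one 128-byte block (sixteen 64-bit words) to the hardware. The
 * final word is written via octeon_sha512_start(), which kicks off the
 * SHA-512 round on the loaded block.
 */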
static void octeon_sha512_transform(const void *_block)
{
	const u64 *block = _block;

	write_octeon_64bit_block_sha512(block[0], 0);
	write_octeon_64bit_block_sha512(block[1], 1);
	write_octeon_64bit_block_sha512(block[2], 2);
	write_octeon_64bit_block_sha512(block[3], 3);
	write_octeon_64bit_block_sha512(block[4], 4);
	write_octeon_64bit_block_sha512(block[5], 5);
	write_octeon_64bit_block_sha512(block[6], 6);
	write_octeon_64bit_block_sha512(block[7], 7);
	write_octeon_64bit_block_sha512(block[8], 8);
	write_octeon_64bit_block_sha512(block[9], 9);
	write_octeon_64bit_block_sha512(block[10], 10);
	write_octeon_64bit_block_sha512(block[11], 11);
	write_octeon_64bit_block_sha512(block[12], 12);
	write_octeon_64bit_block_sha512(block[13], 13);
	write_octeon_64bit_block_sha512(block[14], 14);
	octeon_sha512_start(block[15]);
}

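/*
 * Core update: complete any partially filled block in sctx->buf, run the
 * transform over all full blocks of the input, and buffer the remainder.
 * Callers must have loaded the hash state into the hardware first.
 */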
static void __octeon_sha512_update(struct sha512_state *sctx, const u8 *data,
				   unsigned int len)
{
	unsigned int part_len;
	unsigned int index;
	unsigned int i;

	/* Compute number of bytes mod 128. */
	index = sctx->count[0] % SHA512_BLOCK_SIZE;

	/* Update number of bytes. */
	if ((sctx->count[0] += len) < len)
		sctx->count[1]++;

	part_len = SHA512_BLOCK_SIZE - index;

	/* Transform as many times as possible. */
	if (len >= part_len) {
		memcpy(&sctx->buf[index], data, part_len);
		octeon_sha512_transform(sctx->buf);

		for (i = part_len; i + SHA512_BLOCK_SIZE <= len;
		     i += SHA512_BLOCK_SIZE)
			octeon_sha512_transform(&data[i]);

		index = 0;
	} else {
		i = 0;
	}

	/* Buffer remaining input. */
	memcpy(&sctx->buf[index], &data[i], len - i);
}

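/*
 * shash .update callback: enable the COP2 crypto unit, load the current
 * state, hash the data in hardware and save the resulting state. Updates
 * that do not complete a block are delegated to the generic code.
 */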
static int octeon_sha512_update(struct shash_desc *desc, const u8 *data,
				unsigned int len)
{
	struct sha512_state *sctx = shash_desc_ctx(desc);
	struct octeon_cop2_state state;
	unsigned long flags;

	/*
	 * Small updates never reach the crypto engine, so the generic sha512 is
	 * faster because of the heavyweight octeon_crypto_enable() /
	 * octeon_crypto_disable().
	 */
	if ((sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
		return crypto_sha512_update(desc, data, len);

	flags = octeon_crypto_enable(&state);
	octeon_sha512_store_hash(sctx);

	__octeon_sha512_update(sctx, data, len);

	octeon_sha512_read_hash(sctx);
	octeon_crypto_disable(&state, flags);

	return 0;
}

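/*
 * Finalize: append the standard SHA-512 padding and the 128-bit message
 * bit length, run the last transform(s), and write out the big-endian
 * digest. The context is cleared afterwards.
 */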
static int octeon_sha512_final(struct shash_desc *desc, u8 *hash)
{
	struct sha512_state *sctx = shash_desc_ctx(desc);
	static u8 padding[128] = { 0x80, };
	struct octeon_cop2_state state;
	__be64 *dst = (__be64 *)hash;
	unsigned int pad_len;
	unsigned long flags;
	unsigned int index;
	__be64 bits[2];
	int i;

	/* Save number of bits. */
	bits[1] = cpu_to_be64(sctx->count[0] << 3);
	bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);

	/* Pad out to 112 mod 128. */
	index = sctx->count[0] & 0x7f;
	pad_len = (index < 112) ? (112 - index) : ((128 + 112) - index);

	flags = octeon_crypto_enable(&state);
	octeon_sha512_store_hash(sctx);

	__octeon_sha512_update(sctx, padding, pad_len);

	/* Append length (before padding). */
	__octeon_sha512_update(sctx, (const u8 *)bits, sizeof(bits));

	octeon_sha512_read_hash(sctx);
	octeon_crypto_disable(&state, flags);

	/* Store state in digest. */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be64(sctx->state[i]);

	/* Zeroize sensitive information. */
	memset(sctx, 0, sizeof(struct sha512_state));

	return 0;
}

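/*
 * SHA-384 uses the same engine and finalization as SHA-512: produce the
 * full 64-byte result and copy out the first 48 bytes.
 */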
static int octeon_sha384_final(struct shash_desc *desc, u8 *hash)
{
	u8 D[64];

	octeon_sha512_final(desc, D);

	memcpy(hash, D, 48);
	memzero_explicit(D, 64);

	return 0;
}

static struct shash_alg octeon_sha512_algs[2] = { {
	.digestsize	=	SHA512_DIGEST_SIZE,
	.init		=	sha512_base_init,
	.update		=	octeon_sha512_update,
	.final		=	octeon_sha512_final,
	.descsize	=	sizeof(struct sha512_state),
	.base		=	{
		.cra_name	=	"sha512",
		.cra_driver_name=	"octeon-sha512",
		.cra_priority	=	OCTEON_CR_OPCODE_PRIORITY,
		.cra_blocksize	=	SHA512_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA384_DIGEST_SIZE,
	.init		=	sha384_base_init,
	.update		=	octeon_sha512_update,
	.final		=	octeon_sha384_final,
	.descsize	=	sizeof(struct sha512_state),
	.base		=	{
		.cra_name	=	"sha384",
		.cra_driver_name=	"octeon-sha384",
		.cra_priority	=	OCTEON_CR_OPCODE_PRIORITY,
		.cra_blocksize	=	SHA384_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

static int __init octeon_sha512_mod_init(void)
{
	if (!octeon_has_crypto())
		return -ENOTSUPP;
	return crypto_register_shashes(octeon_sha512_algs,
				       ARRAY_SIZE(octeon_sha512_algs));
}

static void __exit octeon_sha512_mod_fini(void)
{
	crypto_unregister_shashes(octeon_sha512_algs,
				  ARRAY_SIZE(octeon_sha512_algs));
}

module_init(octeon_sha512_mod_init);
module_exit(octeon_sha512_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-512 and SHA-384 Secure Hash Algorithms (OCTEON)");
MODULE_AUTHOR("Aaro Koskinen <aaro.koskinen@iki.fi>");