/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

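/*
 * Per-request state saved while a request is re-issued with a temporary,
 * properly aligned result buffer.  The original completion callback,
 * callback data and result pointer are restored on completion.
 */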
struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
			    halg);
}

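/*
 * Map the page of the current scatterlist entry and return the number of
 * bytes that may be hashed from it, stopping at the next alignment
 * boundary if the starting offset is not aligned for this transform.
 */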
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = crypto_kmap(walk->pg, 0);
	walk->data += offset;

	if (offset & alignmask) {
		unsigned int unaligned = alignmask + 1 - (offset & alignmask);
		if (nbytes > unaligned)
			nbytes = unaligned;
	}

	walk->entrylen -= nbytes;
	return nbytes;
}

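/*
 * Load the next scatterlist entry into the walk state and map its first
 * chunk of data.
 */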
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->pg = sg_page(sg);
	walk->offset = sg->offset;
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

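/*
 * Finish the current step of the walk.  A positive return value is the
 * number of bytes available for the next step, zero means the walk is
 * complete, and a negative value propagates the error passed in by the
 * caller.
 */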
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int nbytes = walk->entrylen;

	walk->data -= walk->offset;

	if (nbytes && walk->offset & alignmask && !err) {
		walk->offset = ALIGN(walk->offset, alignmask + 1);
		walk->data += walk->offset;

		nbytes = min(nbytes,
			     ((unsigned int)(PAGE_SIZE)) - walk->offset);
		walk->entrylen -= nbytes;

		return nbytes;
	}

	crypto_kunmap(walk->data, 0);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (nbytes) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = scatterwalk_sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

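/*
 * Start a hash walk over the data described by an ahash request.  A driver
 * typically consumes the data roughly as follows (sketch only, error
 * handling omitted; hash_block() stands for the driver's own block
 * processing routine):
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, 0))
 *		hash_block(ctx, walk.data, nbytes);
 */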
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

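/*
 * As crypto_hash_walk_first(), but for the legacy hash_desc interface.
 */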
int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
				  struct crypto_hash_walk *walk,
				  struct scatterlist *sg, unsigned int len)
{
	walk->total = len;

	if (!walk->total)
		return 0;

	walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
	walk->sg = sg;
	walk->flags = hdesc->flags;

	return hash_walk_new_entry(walk);
}

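/*
 * Copy a misaligned key into a properly aligned bounce buffer before
 * handing it to the transform's setkey routine.  The buffer is zeroed
 * when freed so that no key material is left behind.
 */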
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return ret;
}

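/*
 * Set the key for a keyed hash, taking the slow path above when the key
 * pointer does not satisfy the transform's alignment mask.
 */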
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return ahash_setkey_unaligned(tfm, key, keylen);

	return tfm->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

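/*
 * Size of the temporary digest buffer including the slack needed to align
 * it to the transform's alignment mask.  ubuf is already aligned to the
 * tfm context alignment, so only the remaining part of the mask is added.
 */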
static inline unsigned int ahash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
}

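/*
 * Helpers for running an operation whose result buffer is misaligned: the
 * request is pointed at an aligned temporary buffer, and on completion the
 * digest is copied back to the caller's buffer before the original
 * completion callback is invoked.
 */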
static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
{
	struct ahash_request_priv *priv = req->priv;

	if (err == -EINPROGRESS)
		return;

	if (!err)
		memcpy(priv->result, req->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	kzfree(priv);
}

static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	ahash_op_unaligned_finish(areq, err);

	complete(data, err);
}

static int ahash_op_unaligned(struct ahash_request *req,
			      int (*op)(struct ahash_request *))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request_priv *priv;
	int err;

	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC);
	if (!priv)
		return -ENOMEM;

	priv->result = req->result;
	priv->complete = req->base.complete;
	priv->data = req->base.data;

	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
	req->base.complete = ahash_op_unaligned_done;
	req->base.data = req;
	req->priv = priv;

	err = op(req);
	ahash_op_unaligned_finish(req, err);

	return err;
}

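/*
 * Invoke op directly when the result buffer is suitably aligned, otherwise
 * go through the unaligned-result path above.
 */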
static int crypto_ahash_op(struct ahash_request *req,
			   int (*op)(struct ahash_request *))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)req->result & alignmask)
		return ahash_op_unaligned(req, op);

	return op(req);
}

int crypto_ahash_final(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

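/*
 * Default finup implementation: run the transform's update step followed
 * by its final step.  The completion chain below handles both asynchronous
 * completion and a possibly misaligned result buffer.
 */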
static void ahash_def_finup_finish2(struct ahash_request *req, int err)
{
	struct ahash_request_priv *priv = req->priv;

	if (err == -EINPROGRESS)
		return;

	if (!err)
		memcpy(priv->result, req->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	kzfree(priv);
}

static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	ahash_def_finup_finish2(areq, err);

	complete(data, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	err = crypto_ahash_reqtfm(req)->final(req);

out:
	ahash_def_finup_finish2(req, err);
	return err;
}

static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;
	struct ahash_request_priv *priv = areq->priv;
	crypto_completion_t complete = priv->complete;
	void *data = priv->data;

	err = ahash_def_finup_finish1(areq, err);

	complete(data, err);
}

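/* Entry point of the default finup path installed by crypto_ahash_init_tfm(). */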
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request_priv *priv;

	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC);
	if (!priv)
		return -ENOMEM;

	priv->result = req->result;
	priv->complete = req->base.complete;
	priv->data = req->base.data;

	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
	req->base.complete = ahash_def_finup_done1;
	req->base.data = req;
	req->priv = priv;

	return ahash_def_finup_finish1(req, tfm->update(req));
}

static int ahash_no_export(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}

static int ahash_no_import(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}

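/*
 * Initialise an ahash transform: install the default operations, then
 * either wrap a synchronous shash algorithm or copy the operations from a
 * native ahash algorithm.
 */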
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;
	hash->export = ahash_no_export;
	hash->import = ahash_no_import;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;

	if (alg->setkey)
		hash->setkey = alg->setkey;
	if (alg->export)
		hash->export = alg->export;
	if (alg->import)
		hash->import = alg->import;

	return 0;
}

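/*
 * Native ahash algorithms get their declared context size; algorithms
 * wrapped from shash only need room for a pointer to the shash transform.
 */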
static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_ahash_type)
		return alg->cra_ctxsize;

	return sizeof(struct crypto_shash *);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};
EXPORT_SYMBOL_GPL(crypto_ahash_type);

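/*
 * Allocate an ahash transform by algorithm name.  A minimal usage sketch
 * (assuming an "sha1" provider is available; error handling omitted, and
 * my_done, my_data, sg, digest and nbytes are placeholders supplied by the
 * caller):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *
 *	ahash_request_set_callback(req, 0, my_done, my_data);
 *	ahash_request_set_crypt(req, sg, digest, nbytes);
 *	crypto_ahash_digest(req);
 */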
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

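/*
 * Common validation and setup shared by algorithm and instance
 * registration.
 */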
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;

	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
	    alg->halg.statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

int crypto_unregister_ahash(struct ahash_alg *alg)
{
	return crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(ahash_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_free_instance);

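/*
 * Helpers for templates that build instances on top of an ahash algorithm.
 */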
int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
			    struct hash_alg_common *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_ahash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);

struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
}
EXPORT_SYMBOL_GPL(ahash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");