/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/types.h>

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

/* alg_name should be the name of the algorithm to be wrapped by cryptd */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
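
/*
 * Example usage (an illustrative sketch only, not copied from an
 * in-tree user): "cbc(aes)" and the zero type/mask are placeholder
 * values, crypto_simd_usable() stands in for whatever "can this run
 * synchronously?" check the caller needs, key/keylen come from the
 * caller, and error handling is abbreviated.
 *
 *	struct cryptd_skcipher *ctfm;
 *	struct crypto_skcipher *tfm;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	crypto_skcipher_setkey(&ctfm->base, key, keylen);
 *
 * Use the child directly when that is safe and nothing is pending on
 * the cryptd queue; otherwise submit requests against &ctfm->base so
 * they are run from the cryptd workqueue:
 *
 *	if (crypto_simd_usable() && !cryptd_skcipher_queued(ctfm))
 *		tfm = cryptd_skcipher_child(ctfm);
 *	else
 *		tfm = &ctfm->base;
 *
 *	cryptd_free_skcipher(ctfm);
 */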

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be the name of the algorithm to be wrapped by cryptd */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
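
/*
 * Example usage (an illustrative sketch only; "sha256" and the zero
 * type/mask are placeholders, req is an ahash_request allocated and
 * set up by the caller, and error handling is abbreviated).  The
 * cryptd instance wraps a synchronous shash: requests submitted to
 * &ctfm->base are hashed from the cryptd workqueue, while
 * cryptd_ahash_child() and cryptd_shash_desc() expose the underlying
 * shash for callers that can complete the operation synchronously
 * (only safe when cryptd_ahash_queued() reports nothing pending).
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	ahash_request_set_tfm(req, &ctfm->base);
 *	crypto_ahash_digest(req);
 *
 *	cryptd_free_ahash(ctfm);
 */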

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
					  u32 type, u32 mask);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);
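
/*
 * Example usage (an illustrative sketch only; "gcm(aes)" and the zero
 * type/mask are placeholders, crypto_simd_usable() stands in for the
 * caller's own synchronous-path check, key/keylen come from the
 * caller, and error handling is abbreviated).  The pattern mirrors the
 * skcipher case: requests submitted to &ctfm->base are run from the
 * cryptd workqueue, while cryptd_aead_child() gives direct access to
 * the wrapped implementation when nothing is queued.
 *
 *	struct cryptd_aead *ctfm;
 *	struct crypto_aead *tfm;
 *
 *	ctfm = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	crypto_aead_setkey(&ctfm->base, key, keylen);
 *
 *	if (crypto_simd_usable() && !cryptd_aead_queued(ctfm))
 *		tfm = cryptd_aead_child(ctfm);
 *	else
 *		tfm = &ctfm->base;
 *
 *	cryptd_free_aead(ctfm);
 */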

#endif