// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */

/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
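
/*
 * Illustrative sketch only (not compiled as part of this file): once the
 * "hctr2" template below is registered, a kernel user could request the mode
 * through the regular skcipher API, e.g. with an AES block cipher.  The key,
 * scatterlist, and length names are placeholders, and error handling is
 * omitted for brevity.
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("hctr2(aes)", 0, 0);
 *	struct skcipher_request *req;
 *	u8 iv[32];	// HCTR2's 32-byte IV carries the tweak
 *
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, 0, NULL, NULL);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	crypto_skcipher_encrypt(req);
 */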

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE		16

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32

struct hctr2_instance_ctx {
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_skcipher_spawn xctr_spawn;
	struct crypto_shash_spawn polyval_spawn;
};

struct hctr2_tfm_ctx {
	struct crypto_cipher *blockcipher;
	struct crypto_skcipher *xctr;
	struct crypto_shash *polyval;
	u8 L[BLOCKCIPHER_BLOCK_SIZE];
	int hashed_tweak_offset;
	/*
	 * This struct is allocated with extra space for two exported hash
	 * states.  Since the hash state size is not known at compile-time, we
	 * can't add these to the struct directly.
	 *
	 * hashed_tweaklen_divisible;
	 * hashed_tweaklen_remainder;
	 */
};
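
/*
 * Trailing tfm context layout (a sketch; see hctr2_hashed_tweaklen() and the
 * cra_ctxsize computation in hctr2_create_common()):
 *
 *	[struct hctr2_tfm_ctx]
 *	[hashed_tweaklen_divisible: crypto_shash_statesize(polyval) bytes]
 *	[hashed_tweaklen_remainder: crypto_shash_statesize(polyval) bytes]
 */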

struct hctr2_request_ctx {
	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
	struct scatterlist *bulk_part_dst;
	struct scatterlist *bulk_part_src;
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	/*
	 * Sub-request sizes are unknown at compile-time, so they need to go
	 * after the members with known sizes.
	 */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request xctr_req;
	} u;
	/*
	 * This struct is allocated with extra space for one exported hash
	 * state.  Since the hash state size is not known at compile-time, we
	 * can't add it to the struct directly.
	 *
	 * hashed_tweak;
	 */
};
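
/*
 * Request context layout, as established in hctr2_init_tfm() (a sketch;
 * exact sizes depend on the chosen polyval and xctr implementations):
 *
 *	[fixed members above]
 *	[u: subreq_size = max(hash_desc + descsize, xctr_req + reqsize)]
 *	[hashed_tweak: crypto_shash_statesize(polyval) bytes]
 */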

static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
					bool has_remainder)
{
	u8 *p = (u8 *)tctx + sizeof(*tctx);

	if (has_remainder) /* For messages not a multiple of block length */
		p += crypto_shash_statesize(tctx->polyval);
	return p;
}

static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
				     struct hctr2_request_ctx *rctx)
{
	return (u8 *)rctx + tctx->hashed_tweak_offset;
}

/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag that indicates whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
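/*
 * Concretely, with TWEAK_SIZE == 32 the block hashed below has its first
 * 8 bytes equal to the little-endian encoding of 32 * 8 * 2 + 2 = 514 (no
 * partial final message block) or 515 (partial final block present), and
 * its remaining 8 bytes zero.
 */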
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
	SHASH_DESC_ON_STACK(shash, tctx->polyval);
	__le64 tweak_length_block[2];
	int err;

	shash->tfm = tctx->polyval;
	memset(tweak_length_block, 0, sizeof(tweak_length_block));

	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder);
	err = crypto_shash_init(shash);
	if (err)
		return err;
	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
				  POLYVAL_BLOCK_SIZE);
	if (err)
		return err;
	return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder));
}

static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
	int err;

	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
	if (err)
		return err;

	crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->xctr,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
	if (err)
		return err;

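	/* The POLYVAL hash key is the encryption of the all-zeroes block. */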
	memset(hbar, 0, sizeof(hbar));
	crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

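	/*
	 * L is the encryption of the block encoding the value 1 (first byte
	 * 0x01, remaining bytes zero).
	 */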
	memset(tctx->L, 0, sizeof(tctx->L));
	tctx->L[0] = 0x01;
	crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

	crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
	if (err)
		return err;
	memzero_explicit(hbar, sizeof(hbar));

	return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false);
}

static int hctr2_hash_tweak(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;
	bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder));
	if (err)
		return err;
	err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE);
	if (err)
		return err;

	// Store the hashed tweak, since we need it when computing both
	// H(T || N) and H(T || V).
	return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx));
}

static int hctr2_hash_message(struct skcipher_request *req,
			      struct scatterlist *sgl,
			      u8 digest[POLYVAL_DIGEST_SIZE])
{
	static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 };
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct sg_mapping_iter miter;
	unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;
	int i;
	int err = 0;
	int n = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
	}
	sg_miter_stop(&miter);

	if (err)
		return err;

	if (remainder) {
		err = crypto_shash_update(hash_desc, padding,
					  BLOCKCIPHER_BLOCK_SIZE - remainder);
		if (err)
			return err;
	}
	return crypto_shash_final(hash_desc, digest);
}

static int hctr2_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	int err;

	// U = UU ^ H(T || V)
	// or M = MM ^ H(T || N)
	hash_desc->tfm = tctx->polyval;
	err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx));
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_dst, digest);
	if (err)
		return err;
	crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

	// Copy U (or M) into dst scatterlist
	scatterwalk_map_and_copy(rctx->first_block, req->dst,
				 0, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}

static void hctr2_xctr_done(void *data, int err)
{
	struct skcipher_request *req = data;

	if (!err)
		err = hctr2_finish(req);

	skcipher_request_complete(req, err);
}

static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	int err;

	// Requests must be at least one block
	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	// Copy M (or U) into a temporary buffer
	scatterwalk_map_and_copy(rctx->first_block, req->src,
				 0, BLOCKCIPHER_BLOCK_SIZE, 0);

	// Create scatterlists for N and V
	rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
					       BLOCKCIPHER_BLOCK_SIZE);
	rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       BLOCKCIPHER_BLOCK_SIZE);

	// MM = M ^ H(T || N)
	// or UU = U ^ H(T || V)
	err = hctr2_hash_tweak(req);
	if (err)
		return err;
	err = hctr2_hash_message(req, rctx->bulk_part_src, digest);
	if (err)
		return err;
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

	// UU = E(MM)
	// or MM = D(UU)
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);
	else
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);

	// S = MM ^ UU ^ L
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
	crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

	// V = XCTR(S, N)
	// or N = XCTR(S, V)
	skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
	skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
				   rctx->bulk_part_dst, bulk_len,
				   rctx->xctr_iv);
	skcipher_request_set_callback(&rctx->u.xctr_req,
				      req->base.flags,
				      hctr2_xctr_done, req);
	return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
		hctr2_finish(req);
}

static int hctr2_encrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, false);
}

static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *xctr;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *polyval;
	unsigned int subreq_size;
	int err;

	xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
	if (IS_ERR(xctr))
		return PTR_ERR(xctr);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_xctr;
	}

	polyval = crypto_spawn_shash(&ictx->polyval_spawn);
	if (IS_ERR(polyval)) {
		err = PTR_ERR(polyval);
		goto err_free_blockcipher;
	}

	tctx->xctr = xctr;
	tctx->blockcipher = blockcipher;
	tctx->polyval = polyval;

	BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
		     sizeof(struct hctr2_request_ctx));
	subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) +
			  crypto_shash_descsize(polyval),
			  sizeof_field(struct hctr2_request_ctx, u.xctr_req) +
			  crypto_skcipher_reqsize(xctr));

	tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) +
				    subreq_size;
	crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset +
				    crypto_shash_statesize(polyval));
	return 0;

err_free_blockcipher:
	crypto_free_cipher(blockcipher);
err_free_xctr:
	crypto_free_skcipher(xctr);
	return err;
}

static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(tctx->blockcipher);
	crypto_free_skcipher(tctx->xctr);
	crypto_free_shash(tctx->polyval);
}

static void hctr2_free_instance(struct skcipher_instance *inst)
{
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_skcipher(&ictx->xctr_spawn);
	crypto_drop_shash(&ictx->polyval_spawn);
	kfree(inst);
}

static int hctr2_create_common(struct crypto_template *tmpl,
			       struct rtattr **tb,
			       const char *xctr_name,
			       const char *polyval_name)
{
	struct skcipher_alg_common *xctr_alg;
	u32 mask;
	struct skcipher_instance *inst;
	struct hctr2_instance_ctx *ictx;
	struct crypto_alg *blockcipher_alg;
	struct shash_alg *polyval_alg;
	char blockcipher_name[CRYPTO_MAX_ALG_NAME];
	int len;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, xctr(block_cipher) */
	err = crypto_grab_skcipher(&ictx->xctr_spawn,
				   skcipher_crypto_instance(inst),
				   xctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	xctr_alg = crypto_spawn_skcipher_alg_common(&ictx->xctr_spawn);

	err = -EINVAL;
	if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
		goto err_free_inst;
	len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
		      sizeof(blockcipher_name));
	if (len < 1)
		goto err_free_inst;
	if (blockcipher_name[len - 1] != ')')
		goto err_free_inst;
	blockcipher_name[len - 1] = 0;

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 blockcipher_name, 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* Require blocksize of 16 bytes */
	err = -EINVAL;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		goto err_free_inst;

	/* Polyval ε-∆U hash function */
	err = crypto_grab_shash(&ictx->polyval_spawn,
				skcipher_crypto_instance(inst),
				polyval_name, 0, mask);
	if (err)
		goto err_free_inst;
	polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn);

	/* Ensure Polyval is being used */
	err = -EINVAL;
	if (strcmp(polyval_alg->base.cra_name, "polyval") != 0)
		goto err_free_inst;

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "hctr2_base(%s,%s)",
		     xctr_alg->base.cra_driver_name,
		     polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
				     polyval_alg->statesize * 2;
	inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask;
	/*
	 * The hash function is called twice, so it is weighted higher than the
	 * xctr and blockcipher.
	 */
	inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
				       4 * polyval_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 7;

	inst->alg.setkey = hctr2_setkey;
	inst->alg.encrypt = hctr2_encrypt;
	inst->alg.decrypt = hctr2_decrypt;
	inst->alg.init = hctr2_init_tfm;
	inst->alg.exit = hctr2_exit_tfm;
	inst->alg.min_keysize = xctr_alg->min_keysize;
	inst->alg.max_keysize = xctr_alg->max_keysize;
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = hctr2_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		hctr2_free_instance(inst);
	}
	return err;
}

static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *xctr_name;
	const char *polyval_name;

	xctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(xctr_name))
		return PTR_ERR(xctr_name);

	polyval_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(polyval_name))
		return PTR_ERR(polyval_name);

	return hctr2_create_common(tmpl, tb, xctr_name, polyval_name);
}

static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *blockcipher_name;
	char xctr_name[CRYPTO_MAX_ALG_NAME];

	blockcipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(blockcipher_name))
		return PTR_ERR(blockcipher_name);

	if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)",
		     blockcipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return hctr2_create_common(tmpl, tb, xctr_name, "polyval");
}
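
/*
 * For example (assuming an "aes" cipher implementation is available),
 * instantiating "hctr2(aes)" builds the inner name "xctr(aes)", uses
 * "polyval" as the hash, derives the block cipher name "aes" back from the
 * xctr algorithm, and registers the instance with cra_name "hctr2(aes)" and
 * a driver name of the form "hctr2_base(<xctr driver>,<polyval driver>)".
 */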

static struct crypto_template hctr2_tmpls[] = {
	{
		/* hctr2_base(xctr_name, polyval_name) */
		.name = "hctr2_base",
		.create = hctr2_create_base,
		.module = THIS_MODULE,
	}, {
		/* hctr2(blockcipher_name) */
		.name = "hctr2",
		.create = hctr2_create,
		.module = THIS_MODULE,
	}
};

static int __init hctr2_module_init(void)
{
	return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}

static void __exit hctr2_module_exit(void)
{
	crypto_unregister_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}

subsys_initcall(hctr2_module_init);
module_exit(hctr2_module_exit);

MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("hctr2");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);