/*-
 * Copyright (C) 2008 Damien Miller <djm@mindrot.org>
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2010-2011 Pawel Jakub Dawidek <pawel@dawidek.net>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD: head/sys/crypto/aesni/aesni_wrap.c 247061 2013-02-20 22:59:53Z pjd $");

#include <sys/param.h>
#include <sys/libkern.h>
#include <sys/malloc.h>
#include <sys/proc.h>
#include <sys/systm.h>
#include <crypto/aesni/aesni.h>

MALLOC_DECLARE(M_AESNI);

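/*
 * CBC encryption: each plaintext block is XORed with the previous
 * ciphertext block (the IV for the first block) before it is encrypted,
 * so ivp always points at the block just produced.  len is expected to
 * be a multiple of AES_BLOCK_LEN.
 */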
void
aesni_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
    const uint8_t *from, uint8_t *to, const uint8_t iv[AES_BLOCK_LEN])
{
	const uint8_t *ivp;
	size_t i;

	len /= AES_BLOCK_LEN;
	ivp = iv;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, ivp);
		ivp = to;
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

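/*
 * ECB helpers: every AES_BLOCK_LEN-sized block is processed
 * independently; the NULL iv argument to aesni_enc()/aesni_dec() means
 * no chaining input is XORed in.
 */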
void
aesni_encrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

void
aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_dec(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

#define	AES_XTS_BLOCKSIZE	16
#define	AES_XTS_IVSIZE		8
#define	AES_XTS_ALPHA		0x87	/* GF(2^128) generator polynomial */

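/*
 * XTS-AES per-block transform, implemented by aesni_crypt_xts_block()
 * below:
 *
 *	C = E_k1(P ^ T) ^ T
 *
 * where T is the tweak for the current block.  After each block the
 * tweak is multiplied by x (alpha) in GF(2^128): both 64-bit halves are
 * shifted left by one bit, the bit shifted out of the low half is
 * carried into the high half, and a bit shifted out of the high half is
 * reduced back in by XORing AES_XTS_ALPHA (0x87) into the lowest byte.
 */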
static void
aesni_crypt_xts_block(int rounds, const void *key_schedule, uint64_t *tweak,
    const uint64_t *from, uint64_t *to, uint64_t *block, int do_encrypt)
{
	int carry;

	block[0] = from[0] ^ tweak[0];
	block[1] = from[1] ^ tweak[1];

	if (do_encrypt)
		aesni_enc(rounds - 1, key_schedule, (uint8_t *)block, (uint8_t *)to, NULL);
	else
		aesni_dec(rounds - 1, key_schedule, (uint8_t *)block, (uint8_t *)to, NULL);

	to[0] ^= tweak[0];
	to[1] ^= tweak[1];

	/* Exponentiate tweak: multiply it by x (alpha) in GF(2^128). */
	carry = ((tweak[0] & 0x8000000000000000ULL) > 0);
	tweak[0] <<= 1;
	if (tweak[1] & 0x8000000000000000ULL) {
		uint8_t *twk = (uint8_t *)tweak;

		twk[0] ^= AES_XTS_ALPHA;
	}
	tweak[1] <<= 1;
	if (carry)
		tweak[1] |= 1;
}

static void
aesni_crypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
{
	uint64_t block[AES_XTS_BLOCKSIZE / 8];
	uint8_t tweak[AES_XTS_BLOCKSIZE];
	size_t i;

	/*
	 * Prepare tweak as E_k2(IV). IV is specified as LE representation
	 * of a 64-bit block number which we allow to be passed in directly.
	 */
#if BYTE_ORDER == LITTLE_ENDIAN
	bcopy(iv, tweak, AES_XTS_IVSIZE);
	/* Last 64 bits of IV are always zero. */
	bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
#else
#error Only LITTLE_ENDIAN architectures are supported.
#endif
	aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);

	len /= AES_XTS_BLOCKSIZE;
	for (i = 0; i < len; i++) {
		aesni_crypt_xts_block(rounds, data_schedule, (uint64_t *)tweak,
		    (const uint64_t *)from, (uint64_t *)to, block, do_encrypt);
		from += AES_XTS_BLOCKSIZE;
		to += AES_XTS_BLOCKSIZE;
	}

	/* Scrub the on-stack tweak and scratch block. */
	bzero(tweak, sizeof(tweak));
	bzero(block, sizeof(block));
}

static void
aesni_encrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 1);
}

static void
aesni_decrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 0);
}

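/*
 * Derive the session key schedules.  keylen is given in bits; an XTS
 * key carries twice the AES key material because its second half keys
 * the tweak cipher, so 256 bits select AES-128-XTS and 512 bits select
 * AES-256-XTS.
 */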
static int
aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
    int keylen)
{

	switch (ses->algo) {
	case CRYPTO_AES_CBC:
		switch (keylen) {
		case 128:
			ses->rounds = AES128_ROUNDS;
			break;
		case 192:
			ses->rounds = AES192_ROUNDS;
			break;
		case 256:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	case CRYPTO_AES_XTS:
		switch (keylen) {
		case 256:
			ses->rounds = AES128_ROUNDS;
			break;
		case 512:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	default:
		return (EINVAL);
	}

	aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
	aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
	if (ses->algo == CRYPTO_AES_CBC)
		arc4rand(ses->iv, sizeof(ses->iv), 0);
	else /* if (ses->algo == CRYPTO_AES_XTS) */ {
		/* The tweak key is the second half: keylen / 2 / 8 bytes in. */
		aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
		    ses->rounds);
	}

	return (0);
}

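/*
 * The AES-NI instructions use the FPU/SSE register file, so unless we
 * are already running on a dedicated FPU kernel thread we must enter a
 * kernel FPU context around the key schedule setup.
 */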
int
aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
{
	struct thread *td;
	int error, saved_ctx;

	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, ses->fpu_ctx, FPU_KERN_NORMAL);
		saved_ctx = 1;
	} else {
		error = 0;
		saved_ctx = 0;
	}
	if (error == 0) {
		error = aesni_cipher_setup_common(ses, encini->cri_key,
		    encini->cri_klen);
		if (saved_ctx)
			fpu_kern_leave(td, ses->fpu_ctx);
	}
	return (error);
}

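/*
 * Process one opencrypto request: gather the payload into a contiguous
 * buffer if necessary, encrypt or decrypt it in place according to the
 * CRD_F_* flags, and copy the result (and IV) back.
 */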
int
aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
    struct cryptop *crp)
{
	struct thread *td;
	uint8_t *buf;
	int error, allocated, saved_ctx;

	buf = aesni_cipher_alloc(enccrd, crp, &allocated);
	if (buf == NULL)
		return (ENOMEM);

	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, ses->fpu_ctx, FPU_KERN_NORMAL);
		if (error != 0)
			goto out;
		saved_ctx = 1;
	} else {
		saved_ctx = 0;
		error = 0;
	}

	if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
		error = aesni_cipher_setup_common(ses, enccrd->crd_key,
		    enccrd->crd_klen);
		if (error != 0) {
			/* Don't leak the FPU context on the error path. */
			if (saved_ctx)
				fpu_kern_leave(td, ses->fpu_ctx);
			goto out;
		}
	}

	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
			crypto_copyback(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
			    enccrd->crd_len, buf, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	} else {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		else
			crypto_copydata(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			/* aesni_decrypt_cbc() decrypts buf in place. */
			aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
			    enccrd->crd_len, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_decrypt_xts(ses->rounds, ses->dec_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	}
	if (saved_ctx)
		fpu_kern_leave(td, ses->fpu_ctx);
	if (allocated)
		crypto_copyback(crp->crp_flags, crp->crp_buf, enccrd->crd_skip,
		    enccrd->crd_len, buf);
	/* For encryption, save the last ciphertext block as the next IV. */
	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0)
		crypto_copydata(crp->crp_flags, crp->crp_buf,
		    enccrd->crd_skip + enccrd->crd_len - AES_BLOCK_LEN,
		    AES_BLOCK_LEN, ses->iv);
 out:
	if (allocated) {
		/* Scrub the bounce buffer before freeing it. */
		bzero(buf, enccrd->crd_len);
		free(buf, M_AESNI);
	}
	return (error);
}