/*-
 * Copyright (C) 2008 Damien Miller <djm@mindrot.org>
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2010-2011 Pawel Jakub Dawidek <pawel@dawidek.net>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD: head/sys/crypto/aesni/aesni_wrap.c 247061 2013-02-20 22:59:53Z pjd $");

#include <sys/param.h>
#include <sys/libkern.h>
#include <sys/malloc.h>
#include <sys/proc.h>
#include <sys/systm.h>
#include <crypto/aesni/aesni.h>

MALLOC_DECLARE(M_AESNI);

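/*
 * CBC-mode encryption: each plaintext block is XORed with the previous
 * ciphertext block (the caller-supplied IV for the first block) before
 * being run through the AES-NI encrypt primitive.  "len" is truncated to
 * a whole number of AES_BLOCK_LEN blocks; any trailing partial block is
 * ignored.
 */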
void
aesni_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
    const uint8_t *from, uint8_t *to, const uint8_t iv[AES_BLOCK_LEN])
{
	const uint8_t *ivp;
	size_t i;

	len /= AES_BLOCK_LEN;
	ivp = iv;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, ivp);
		ivp = to;
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

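/*
 * ECB mode: every AES_BLOCK_LEN block is encrypted (or, below, decrypted)
 * independently, with no chaining value, so no IV is passed to the
 * underlying primitive.
 */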
void
aesni_encrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

void
aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_dec(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

#define	AES_XTS_BLOCKSIZE	16
#define	AES_XTS_IVSIZE		8
#define	AES_XTS_ALPHA		0x87	/* GF(2^128) generator polynomial */

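/*
 * Process one 16-byte XTS block: whiten the input with the current tweak,
 * run the block cipher, whiten the output again, then advance the tweak by
 * multiplying it by x in GF(2^128) (a one-bit left shift which, on carry
 * out of the high bit, folds in the reduction constant AES_XTS_ALPHA).
 * The tweak is treated as two 64-bit words stored little-endian in memory.
 */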
static void
aesni_crypt_xts_block(int rounds, const void *key_schedule, uint64_t *tweak,
    const uint64_t *from, uint64_t *to, uint64_t *block, int do_encrypt)
{
	int carry;

	block[0] = from[0] ^ tweak[0];
	block[1] = from[1] ^ tweak[1];

	if (do_encrypt)
		aesni_enc(rounds - 1, key_schedule, (uint8_t *)block, (uint8_t *)to, NULL);
	else
		aesni_dec(rounds - 1, key_schedule, (uint8_t *)block, (uint8_t *)to, NULL);

	to[0] ^= tweak[0];
	to[1] ^= tweak[1];

	/* Exponentiate tweak. */
	carry = ((tweak[0] & 0x8000000000000000ULL) > 0);
	tweak[0] <<= 1;
	if (tweak[1] & 0x8000000000000000ULL) {
		uint8_t *twk = (uint8_t *)tweak;

		twk[0] ^= AES_XTS_ALPHA;
	}
	tweak[1] <<= 1;
	if (carry)
		tweak[1] |= 1;
}

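/*
 * XTS bulk routine: derive the initial tweak by encrypting the 64-bit
 * little-endian block number (zero-padded to 128 bits) with the tweak key
 * schedule, then walk the buffer one AES_XTS_BLOCKSIZE block at a time.
 * Scratch state is zeroed before returning so no tweak or plaintext-derived
 * material is left on the stack.
 */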
static void
aesni_crypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
{
	uint64_t block[AES_XTS_BLOCKSIZE / 8];
	uint8_t tweak[AES_XTS_BLOCKSIZE];
	size_t i;

	/*
	 * Prepare tweak as E_k2(IV). IV is specified as LE representation
	 * of a 64-bit block number which we allow to be passed in directly.
	 */
#if BYTE_ORDER == LITTLE_ENDIAN
	bcopy(iv, tweak, AES_XTS_IVSIZE);
	/* Last 64 bits of IV are always zero. */
	bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
#else
#error Only LITTLE_ENDIAN architectures are supported.
#endif
	aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);

	len /= AES_XTS_BLOCKSIZE;
	for (i = 0; i < len; i++) {
		aesni_crypt_xts_block(rounds, data_schedule, (uint64_t *)tweak,
		    (const uint64_t *)from, (uint64_t *)to, block, do_encrypt);
		from += AES_XTS_BLOCKSIZE;
		to += AES_XTS_BLOCKSIZE;
	}

	bzero(tweak, sizeof(tweak));
	bzero(block, sizeof(block));
}

static void
aesni_encrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 1);
}

static void
aesni_decrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 0);
}

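/*
 * Translate the session key into AES-NI key schedules.  "keylen" is given
 * in bits; XTS keys are twice the cipher key size because the second half
 * is the independent tweak key (256 -> AES-128-XTS, 512 -> AES-256-XTS).
 * For CBC a random IV is generated up front; requests may still override
 * it via CRD_F_IV_EXPLICIT.
 */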
static int
aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
    int keylen)
{

	switch (ses->algo) {
	case CRYPTO_AES_CBC:
		switch (keylen) {
		case 128:
			ses->rounds = AES128_ROUNDS;
			break;
		case 192:
			ses->rounds = AES192_ROUNDS;
			break;
		case 256:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	case CRYPTO_AES_XTS:
		switch (keylen) {
		case 256:
			ses->rounds = AES128_ROUNDS;
			break;
		case 512:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	default:
		return (EINVAL);
	}

	aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
	aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
	if (ses->algo == CRYPTO_AES_CBC)
		arc4rand(ses->iv, sizeof(ses->iv), 0);
	else /* if (ses->algo == CRYPTO_AES_XTS) */ {
		aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
		    ses->rounds);
	}

	return (0);
}

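/*
 * Session setup entry point.  Key schedule generation uses the XMM
 * registers, so the FPU context must be borrowed with fpu_kern_enter()
 * unless we are already running on a dedicated FPU kernel thread.
 */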
int
aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
{
	struct thread *td;
	int error, saved_ctx;

	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, ses->fpu_ctx, FPU_KERN_NORMAL);
		saved_ctx = 1;
	} else {
		error = 0;
		saved_ctx = 0;
	}
	if (error == 0) {
		error = aesni_cipher_setup_common(ses, encini->cri_key,
		    encini->cri_klen);
		if (saved_ctx)
			fpu_kern_leave(td, ses->fpu_ctx);
	}
	return (error);
}

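/*
 * Encrypt or decrypt one crypto(9) request.  The data is gathered into a
 * contiguous buffer (allocated if necessary), processed in place with the
 * session's CBC or XTS routines, and copied back out.  As above, the FPU
 * context is entered unless this is an FPU kernel thread.
 */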
int
aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
    struct cryptop *crp)
{
	struct thread *td;
	uint8_t *buf;
	int error, allocated, saved_ctx;

	buf = aesni_cipher_alloc(enccrd, crp, &allocated);
	if (buf == NULL)
		return (ENOMEM);

	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, ses->fpu_ctx, FPU_KERN_NORMAL);
		if (error != 0)
			goto out;
		saved_ctx = 1;
	} else {
		saved_ctx = 0;
		error = 0;
	}

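	/* The request may carry its own key; rekey the session if so. */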
	if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
		error = aesni_cipher_setup_common(ses, enccrd->crd_key,
		    enccrd->crd_klen);
		if (error != 0) {
			/* Release the borrowed FPU context before bailing. */
			if (saved_ctx)
				fpu_kern_leave(td, ses->fpu_ctx);
			goto out;
		}
	}

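	/*
	 * For encryption, use the session IV unless the request supplies an
	 * explicit one, and write the IV into the request unless the caller
	 * marked it as already present.  For decryption, take the IV from
	 * the request descriptor or from the data buffer itself.
	 */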
	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
			crypto_copyback(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
			    enccrd->crd_len, buf, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	} else {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		else
			crypto_copydata(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
			    enccrd->crd_len, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_decrypt_xts(ses->rounds, ses->dec_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	}
	if (saved_ctx)
		fpu_kern_leave(td, ses->fpu_ctx);
	if (allocated)
		crypto_copyback(crp->crp_flags, crp->crp_buf, enccrd->crd_skip,
		    enccrd->crd_len, buf);
	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0)
		crypto_copydata(crp->crp_flags, crp->crp_buf,
		    enccrd->crd_skip + enccrd->crd_len - AES_BLOCK_LEN,
		    AES_BLOCK_LEN, ses->iv);
out:
	if (allocated) {
		bzero(buf, enccrd->crd_len);
		free(buf, M_AESNI);
	}
	return (error);
}
321