aes.c revision 10500:a10fbcfc2f21
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/ddi.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/sysmacros.h>
#include <sys/strsun.h>
#include <modes/modes.h>
#define	_AES_FIPS_POST
#define	_AES_IMPL
#include <aes/aes_impl.h>

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};

/*
 * The following definitions are to keep EXPORT_SRC happy.
 */
#ifndef AES_MIN_KEY_BYTES
#define	AES_MIN_KEY_BYTES		0
#endif

#ifndef AES_MAX_KEY_BYTES
#define	AES_MAX_KEY_BYTES		0
#endif

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};

/* operations are in-place if the output buffer is NULL */
#define	AES_ARG_INPLACE(input, output)				\
	if ((output) == NULL)					\
		(output) = (input);

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_mac_ops_t aes_mac_ops = {
	NULL,
	NULL,
	NULL,
	NULL,
	aes_mac_atomic,
	aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};

static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops
};

static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_1,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
_init(void)
{
	int ret;

	/*
	 * Register with KCF. If the registration fails, return error.
	 */
	if ((ret = crypto_register_provider(&aes_prov_info,
	    &aes_prov_handle)) != CRYPTO_SUCCESS) {
		cmn_err(CE_WARN, "%s _init: crypto_register_provider() "
		    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
		return (EACCES);
	}

	if ((ret = mod_install(&modlinkage)) != 0) {
		int rv;

		ASSERT(aes_prov_handle != NULL);
		/* We should not return if the unregister returns busy. */
		while ((rv = crypto_unregister_provider(aes_prov_handle))
		    == CRYPTO_BUSY) {
			cmn_err(CE_WARN,
			    "%s _init: crypto_unregister_provider() "
			    "failed (0x%x). Retrying.",
			    CRYPTO_PROVIDER_NAME, rv);
			/* wait 10 seconds and try again. */
			delay(10 * drv_usectohz(1000000));
		}
	}

	return (ret);
}

int
_fini(void)
{
	int ret;

	/*
	 * Unregister from KCF if previous registration succeeded.
	 */
	if (aes_prov_handle != NULL) {
		if ((ret = crypto_unregister_provider(aes_prov_handle)) !=
		    CRYPTO_SUCCESS) {
			cmn_err(CE_WARN,
			    "%s _fini: crypto_unregister_provider() "
			    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
			return (EBUSY);
		}
		aes_prov_handle = NULL;
	}

	return (mod_remove(&modlinkage));
}

int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}

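/*
 * Sanity check the mechanism type and parameter length and, when ctx is
 * non-NULL, allocate a mode-specific context (ECB, CBC, CTR, CCM, GCM or
 * GMAC) with the given kmem flag.  The key itself is validated later by
 * init_keysched().
 */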
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}

/* EXPORT DELETE START */

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

/* EXPORT DELETE END */

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}


/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

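/*
 * Copy a 16-byte AES block from 'in' into the 64-bit aligned buffer 'out'.
 * Two 64-bit loads are used when the source is suitably aligned; otherwise
 * the block is copied with AES_COPY_BLOCK().
 */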
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

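/*
 * Single-part encrypt entry point.  Validates the output buffer size for
 * the mode in use, runs aes_encrypt_update() over the input, appends the
 * CCM/GCM/GMAC authentication tag when required, and frees the context.
 */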
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * Return the length needed to store the output.
	 * Do not destroy the context for the following cases.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to the existing ciphertext, so adjust the remaining
		 * length accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to the existing ciphertext, so adjust the remaining
		 * length accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	return (ret);
}

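/*
 * Single-part decrypt entry point.  Mirrors aes_encrypt(): checks the
 * plaintext buffer size, runs aes_decrypt_update(), verifies the
 * CCM/GCM/GMAC tag in the final step, and frees the context.
 */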
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	return (ret);
}

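/*
 * Multi-part encrypt update.  Only whole AES blocks are produced here;
 * partial input is buffered in the context, except for CTR mode where
 * ctr_mode_final() flushes any remainder immediately.
 */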
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

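/*
 * Multi-part decrypt update.  CCM, GCM and GMAC accumulate ciphertext in
 * the context and return no plaintext until the final call; the other
 * modes emit whole blocks and buffer any partial block.
 */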
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

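/*
 * Multi-part encrypt final.  Flushes buffered CTR bytes, or computes and
 * emits the CCM/GCM/GMAC tag; for the plain block modes any leftover
 * input is an error.
 */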
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

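/*
 * Multi-part decrypt final.  For CCM, GCM and GMAC this is where the
 * accumulated ciphertext is decrypted and the tag is verified, so the
 * caller's buffer must hold the entire plaintext.
 */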
/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

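/*
 * Single-call atomic encrypt.  Uses an aes_ctx_t on the stack; the cleanup
 * at the 'out' label frees a provider-owned key schedule on any exit path.
 */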
/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		/* FALLTHRU */
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	return (ret);
}

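/*
 * Single-call atomic decrypt.  Like aes_encrypt_atomic(), but the cleanup
 * at 'out' must also free the plaintext buffers that CCM/GCM/GMAC may
 * have allocated while accumulating ciphertext.
 */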
/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{

/* EXPORT DELETE START */

	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size,
	    crypto_kmflag(req))) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule.  Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		bzero(keysched, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}


static int
aes_free_context(crypto_ctx_t *ctx)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

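/*
 * Fill in an aes_ctx_t for the requested mechanism.  The key schedule is
 * taken from the pre-computed context template when one is supplied by
 * the caller; otherwise it is allocated here and flagged
 * PROVIDER_OWNS_KEY_SCHEDULE so that it is freed along with the context.
 */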
static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;

/* EXPORT DELETE START */

	void *keysched;
	size_t size;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

/* EXPORT DELETE END */

	return (rv);
}

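/*
 * Translate CK_AES_GMAC_PARAMS into CK_AES_GCM_PARAMS so that GMAC can be
 * handled by the GCM entry points: the caller's data becomes the AAD, the
 * IV is fixed at AES_GMAC_IV_LEN bytes and the tag at AES_GMAC_TAG_BITS
 * bits.
 */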
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}

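/*
 * GMAC MAC generation is implemented as a GCM encryption of an empty
 * plaintext with the data passed as AAD; the resulting tag is the MAC.
 */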
static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
	    key, &null_crypto_data, mac, template, req));
}

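/*
 * GMAC verification is the reverse: the MAC is handed to the GCM decrypt
 * path as the ciphertext (tag only), which fails unless the tag matches.
 */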
static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
	    key, mac, &null_crypto_data, template, req));
}

/*
 * AES Power-Up Self-Test
 */
void
aes_POST(int *rc)
{

	int ret;

	/* AES Power-Up Self-Test for 128-bit key. */
	ret = fips_aes_post(FIPS_AES_128_KEY_SIZE);

	if (ret != CRYPTO_SUCCESS)
		goto out;

	/* AES Power-Up Self-Test for 192-bit key. */
	ret = fips_aes_post(FIPS_AES_192_KEY_SIZE);

	if (ret != CRYPTO_SUCCESS)
		goto out;

	/* AES Power-Up Self-Test for 256-bit key. */
	ret = fips_aes_post(FIPS_AES_256_KEY_SIZE);

out:
	*rc = ret;

}