/*
 * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/* Dispatch functions for AES GCM mode */

/*
 * This file uses the low level AES functions (which are deprecated for
 * non-internal use) in order to implement provider AES ciphers.
 */
#include "internal/deprecated.h"

#include "cipher_aes_gcm.h"

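/*
 * Set up the AES key schedule and bind the block/CTR primitives that the
 * generic GCM code will use.  The best implementation available on the
 * running CPU is selected at run time: hardware AES (HWAES), bit-sliced
 * AES (BSAES), vector-permutation AES (VPAES), or the portable C code.
 */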
static int aes_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
                           size_t keylen)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    AES_KEY *ks = &actx->ks.ks;

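    /*
     * The capability checks below form a single if/else chain even though
     * each branch is guarded by the preprocessor: every conditional block
     * ends with a dangling "} else" so that the final brace-only block acts
     * as the fallback when no accelerated implementation is usable.
     */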
# ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
#  ifdef HWAES_ctr32_encrypt_blocks
        GCM_HW_SET_KEY_CTR_FN(ks, HWAES_set_encrypt_key, HWAES_encrypt,
                              HWAES_ctr32_encrypt_blocks);
#  else
        GCM_HW_SET_KEY_CTR_FN(ks, HWAES_set_encrypt_key, HWAES_encrypt, NULL);
#  endif /* HWAES_ctr32_encrypt_blocks */
    } else
# endif /* HWAES_CAPABLE */

# ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE) {
        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt,
                              ossl_bsaes_ctr32_encrypt_blocks);
    } else
# endif /* BSAES_CAPABLE */

# ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        GCM_HW_SET_KEY_CTR_FN(ks, vpaes_set_encrypt_key, vpaes_encrypt, NULL);
    } else
# endif /* VPAES_CAPABLE */

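    /* Portable fallback when no accelerated AES implementation applies. */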
    {
# ifdef AES_CTR_ASM
        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt,
                              AES_ctr32_encrypt);
# else
        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt, NULL);
# endif /* AES_CTR_ASM */
    }
    ctx->key_set = 1;
    return 1;
}

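/*
 * Encrypt or decrypt a buffer through the GCM state in ctx.  When a CTR
 * helper is available the bulk of the data goes through the counter-mode
 * fast path; when AES_GCM_ASM is defined, large requests are additionally
 * handed to the stitched AES-GCM assembly routines.  Returns 1 on success
 * and 0 on failure.
 */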
static int generic_aes_gcm_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in,
                                         size_t len, unsigned char *out)
{
    if (ctx->enc) {
        if (ctx->ctr != NULL) {
#if defined(AES_GCM_ASM)
            size_t bulk = 0;

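            /*
             * Flush any partial block left over from a previous update
             * through the generic code, hand the following whole blocks to
             * the assembly routine, then let the ctr32 path below process
             * whatever remains.
             */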
            if (len >= AES_GCM_ENC_BYTES && AES_GCM_ASM(ctx)) {
                size_t res = (16 - ctx->gcm.mres) % 16;

                if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, res))
                    return 0;

                bulk = AES_gcm_encrypt(in + res, out + res, len - res,
                                       ctx->gcm.key,
                                       ctx->gcm.Yi.c, ctx->gcm.Xi.u);

                ctx->gcm.len.u[1] += bulk;
                bulk += res;
            }
            if (CRYPTO_gcm128_encrypt_ctr32(&ctx->gcm, in + bulk, out + bulk,
                                            len - bulk, ctx->ctr))
                return 0;
#else
            if (CRYPTO_gcm128_encrypt_ctr32(&ctx->gcm, in, out, len, ctx->ctr))
                return 0;
#endif /* AES_GCM_ASM */
        } else {
            if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, len))
                return 0;
        }
    } else {
        if (ctx->ctr != NULL) {
#if defined(AES_GCM_ASM)
            size_t bulk = 0;

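            /* Mirror of the encryption path above, using the decrypt routines. */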
            if (len >= AES_GCM_DEC_BYTES && AES_GCM_ASM(ctx)) {
                size_t res = (16 - ctx->gcm.mres) % 16;

                if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, res))
                    return 0;

                bulk = AES_gcm_decrypt(in + res, out + res, len - res,
                                       ctx->gcm.key,
                                       ctx->gcm.Yi.c, ctx->gcm.Xi.u);

                ctx->gcm.len.u[1] += bulk;
                bulk += res;
            }
            if (CRYPTO_gcm128_decrypt_ctr32(&ctx->gcm, in + bulk, out + bulk,
                                            len - bulk, ctx->ctr))
                return 0;
#else
            if (CRYPTO_gcm128_decrypt_ctr32(&ctx->gcm, in, out, len, ctx->ctr))
                return 0;
#endif /* AES_GCM_ASM */
        } else {
            if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, len))
                return 0;
        }
    }
    return 1;
}

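/*
 * Generic method table for AES-GCM.  Platform-specific variants included
 * below may provide their own table and selection logic.
 */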
static const PROV_GCM_HW aes_gcm = {
    aes_gcm_initkey,
    ossl_gcm_setiv,
    ossl_gcm_aad_update,
    generic_aes_gcm_cipher_update,
    ossl_gcm_cipher_final,
    ossl_gcm_one_shot
};

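/*
 * Where extra hardware support is compiled in (s390x, AES-NI, SPARC T4,
 * ARMv8 PMULL), the included .inc file supplies ossl_prov_aes_hw_gcm();
 * otherwise the generic table above is returned for every key size.
 */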
#if defined(S390X_aes_128_CAPABLE)
# include "cipher_aes_gcm_hw_s390x.inc"
#elif defined(AESNI_CAPABLE)
# include "cipher_aes_gcm_hw_aesni.inc"
#elif defined(SPARC_AES_CAPABLE)
# include "cipher_aes_gcm_hw_t4.inc"
#elif defined(AES_PMULL_CAPABLE) && defined(AES_GCM_ASM)
# include "cipher_aes_gcm_hw_armv8.inc"
#else
const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
{
    return &aes_gcm;
}
#endif