46#ifdef _KERNEL
47MALLOC_DECLARE(M_ELI);
48
/*
 * opencrypto(9) completion callback for the synchronous cipher request
 * issued by g_eli_crypto_cipher().  Marks the request done and wakes the
 * sleeping dispatcher.
 */
static int
g_eli_crypto_done(struct cryptop *crp)
{

	/*
	 * Any non-NULL value serves as the "completed" flag polled by the
	 * tsleep() loop in g_eli_crypto_cipher(); the request pointer itself
	 * is convenient.  The store must happen before wakeup() so the
	 * waiter's re-check observes it.
	 */
	crp->crp_opaque = (void *)crp;
	wakeup(crp);
	return (0);
}
57
/*
 * In-kernel implementation: encrypt (enc != 0) or decrypt `data' in place
 * via the opencrypto(9) framework, restricted to software drivers.
 * `keysize' is in bits.  A zero IV is supplied explicitly (callers protect
 * unique, key-wrapped metadata only).  The call is synchronous: it creates
 * a one-shot session, dispatches a single request, sleeps until the
 * callback fires, then tears everything down.  Returns 0 or an errno value.
 */
static int
g_eli_crypto_cipher(u_int algo, int enc, u_char *data, size_t datasize,
    const u_char *key, size_t keysize)
{
	struct cryptoini cri;
	struct cryptop *crp;
	struct cryptodesc *crd;
	uint64_t sid;
	u_char *p;
	int error;

	/* XTS is remapped to CBC by the encrypt/decrypt wrappers below. */
	KASSERT(algo != CRYPTO_AES_XTS,
	    ("%s: CRYPTO_AES_XTS unexpected here", __func__));

	bzero(&cri, sizeof(cri));
	cri.cri_alg = algo;
	cri.cri_key = __DECONST(void *, key);
	cri.cri_klen = keysize;		/* opencrypto key length is in bits. */
	error = crypto_newsession(&sid, &cri, CRYPTOCAP_F_SOFTWARE);
	if (error != 0)
		return (error);
	/* One allocation carries both the request and its sole descriptor. */
	p = malloc(sizeof(*crp) + sizeof(*crd), M_ELI, M_NOWAIT | M_ZERO);
	if (p == NULL) {
		crypto_freesession(sid);
		return (ENOMEM);
	}
	crp = (struct cryptop *)p; p += sizeof(*crp);
	crd = (struct cryptodesc *)p; p += sizeof(*crd);

	crd->crd_skip = 0;
	crd->crd_len = datasize;
	/* We supply the IV ourselves; none is read from or written to data. */
	crd->crd_flags = CRD_F_IV_EXPLICIT | CRD_F_IV_PRESENT;
	if (enc)
		crd->crd_flags |= CRD_F_ENCRYPT;
	crd->crd_alg = algo;
	crd->crd_key = __DECONST(void *, key);
	crd->crd_klen = keysize;
	bzero(crd->crd_iv, sizeof(crd->crd_iv));	/* All-zero IV. */
	crd->crd_next = NULL;

	crp->crp_sid = sid;
	crp->crp_ilen = datasize;
	crp->crp_olen = datasize;
	/* crp_opaque doubles as the completion flag set by the callback. */
	crp->crp_opaque = NULL;
	crp->crp_callback = g_eli_crypto_done;
	crp->crp_buf = (void *)data;
	/* Allow the callback to be invoked directly by synchronous drivers. */
	crp->crp_flags = CRYPTO_F_CBIFSYNC;
	crp->crp_desc = crd;

	error = crypto_dispatch(crp);
	if (error == 0) {
		/*
		 * No lock pairs this sleep with the callback's wakeup(), so
		 * a wakeup may slip in between the flag check and tsleep();
		 * the hz/5 timeout bounds any such stall and the loop
		 * re-checks the completion flag each pass.
		 */
		while (crp->crp_opaque == NULL)
			tsleep(crp, PRIBIO, "geli", hz / 5);
		error = crp->crp_etype;
	}

	free(crp, M_ELI);
	crypto_freesession(sid);
	return (error);
}
118#else /* !_KERNEL */
119static int
120g_eli_crypto_cipher(u_int algo, int enc, u_char *data, size_t datasize,
121 const u_char *key, size_t keysize)
122{
123 EVP_CIPHER_CTX ctx;
124 const EVP_CIPHER *type;
125 u_char iv[keysize];
126 int outsize;
127
128 assert(algo != CRYPTO_AES_XTS);
129
130 switch (algo) {
131 case CRYPTO_NULL_CBC:
132 type = EVP_enc_null();
133 break;
134 case CRYPTO_AES_CBC:
135 switch (keysize) {
136 case 128:
137 type = EVP_aes_128_cbc();
138 break;
139 case 192:
140 type = EVP_aes_192_cbc();
141 break;
142 case 256:
143 type = EVP_aes_256_cbc();
144 break;
145 default:
146 return (EINVAL);
147 }
148 break;
149 case CRYPTO_BLF_CBC:
150 type = EVP_bf_cbc();
151 break;
152#ifndef OPENSSL_NO_CAMELLIA
153 case CRYPTO_CAMELLIA_CBC:
154 switch (keysize) {
155 case 128:
156 type = EVP_camellia_128_cbc();
157 break;
158 case 192:
159 type = EVP_camellia_192_cbc();
160 break;
161 case 256:
162 type = EVP_camellia_256_cbc();
163 break;
164 default:
165 return (EINVAL);
166 }
167 break;
168#endif
169 case CRYPTO_3DES_CBC:
170 type = EVP_des_ede3_cbc();
171 break;
172 default:
173 return (EINVAL);
174 }
175
176 EVP_CIPHER_CTX_init(&ctx);
177
178 EVP_CipherInit_ex(&ctx, type, NULL, NULL, NULL, enc);
179 EVP_CIPHER_CTX_set_key_length(&ctx, keysize / 8);
180 EVP_CIPHER_CTX_set_padding(&ctx, 0);
181 bzero(iv, sizeof(iv));
182 EVP_CipherInit_ex(&ctx, NULL, NULL, key, iv, enc);
183
184 if (EVP_CipherUpdate(&ctx, data, &outsize, data, datasize) == 0) {
185 EVP_CIPHER_CTX_cleanup(&ctx);
186 return (EINVAL);
187 }
188 assert(outsize == (int)datasize);
189
190 if (EVP_CipherFinal_ex(&ctx, data + outsize, &outsize) == 0) {
191 EVP_CIPHER_CTX_cleanup(&ctx);
192 return (EINVAL);
193 }
194 assert(outsize == 0);
195
196 EVP_CIPHER_CTX_cleanup(&ctx);
197 return (0);
198}
199#endif /* !_KERNEL */
200
201int
202g_eli_crypto_encrypt(u_int algo, u_char *data, size_t datasize,
203 const u_char *key, size_t keysize)
204{
205
206 /* We prefer AES-CBC for metadata protection. */
207 if (algo == CRYPTO_AES_XTS)
208 algo = CRYPTO_AES_CBC;
209
210 return (g_eli_crypto_cipher(algo, 1, data, datasize, key, keysize));
211}
212
213int
214g_eli_crypto_decrypt(u_int algo, u_char *data, size_t datasize,
215 const u_char *key, size_t keysize)
216{
217
218 /* We prefer AES-CBC for metadata protection. */
219 if (algo == CRYPTO_AES_XTS)
220 algo = CRYPTO_AES_CBC;
221
222 return (g_eli_crypto_cipher(algo, 0, data, datasize, key, keysize));
223}
224
|
46void 47g_eli_crypto_hmac_init(struct hmac_ctx *ctx, const uint8_t *hkey, 48 size_t hkeylen) 49{ 50 u_char k_ipad[128], key[128]; 51 SHA512_CTX lctx; 52 u_int i; 53
--- 55 unchanged lines hidden (view full) ---
109 size_t datasize, uint8_t *md, size_t mdsize) 110{ 111 struct hmac_ctx ctx; 112 113 g_eli_crypto_hmac_init(&ctx, hkey, hkeysize); 114 g_eli_crypto_hmac_update(&ctx, data, datasize); 115 g_eli_crypto_hmac_final(&ctx, md, mdsize); 116}
|
/*
 * Here we generate IV. It is unique for every sector.
 */
void
g_eli_crypto_ivgen(struct g_eli_softc *sc, off_t offset, u_char *iv,
    size_t size)
{
	uint8_t off[8];

	/* Serialize the sector offset in the byte order recorded on disk. */
	if ((sc->sc_flags & G_ELI_FLAG_NATIVE_BYTE_ORDER) != 0)
		bcopy(&offset, off, sizeof(off));
	else
		le64enc(off, (uint64_t)offset);

	switch (sc->sc_ealgo) {
	case CRYPTO_AES_XTS:
		/* XTS tweak: raw offset in the low 8 bytes, rest zeroed. */
		bcopy(off, iv, sizeof(off));
		bzero(iv + sizeof(off), size - sizeof(off));
		break;
	default:
	    {
		u_char hash[SHA256_DIGEST_LENGTH];
		SHA256_CTX ctx;

		/* Copy precalculated SHA256 context for IV-Key. */
		bcopy(&sc->sc_ivctx, &ctx, sizeof(ctx));
		SHA256_Update(&ctx, off, sizeof(off));
		SHA256_Final(hash, &ctx);
		bcopy(hash, iv, MIN(sizeof(hash), size));
		break;
	    }
	}
}