summary refs log tree commit diff
path: root/src/lib/libcrypto/evp/e_aes.c
diff options
context:
space:
mode:
Diffstat (limited to 'src/lib/libcrypto/evp/e_aes.c')
-rw-r--r--src/lib/libcrypto/evp/e_aes.c246
1 files changed, 213 insertions, 33 deletions
diff --git a/src/lib/libcrypto/evp/e_aes.c b/src/lib/libcrypto/evp/e_aes.c
index fd1c34526f..e4d9457c96 100644
--- a/src/lib/libcrypto/evp/e_aes.c
+++ b/src/lib/libcrypto/evp/e_aes.c
@@ -786,48 +786,46 @@ aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
786 } 786 }
787} 787}
788 788
/*
 * Select the best available AES implementation, initialise the AES key
 * schedule and the GCM128 context with it, and return the matching
 * ctr32 bulk-encryption function, or NULL if the selected implementation
 * provides none (in which case the generic block path is used).
 *
 * key_len is in bytes; the *_set_encrypt_key() APIs take bits, hence
 * the "* 8" below.
 */
static ctr128_f
aes_gcm_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
    const unsigned char *key, size_t key_len)
{
#ifdef BSAES_CAPABLE
	if (BSAES_CAPABLE) {
		/* bsaes supplies its own ctr32 bulk routine. */
		AES_set_encrypt_key(key, key_len * 8, aes_key);
		CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
		return (ctr128_f)bsaes_ctr32_encrypt_blocks;
	} else
#endif
#ifdef VPAES_CAPABLE
	if (VPAES_CAPABLE) {
		/* vpaes has no dedicated CTR implementation. */
		vpaes_set_encrypt_key(key, key_len * 8, aes_key);
		CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
		return NULL;
	} else
#endif
	(void)0; /* terminate potentially open 'else' */

	/* Generic C fallback. */
	AES_set_encrypt_key(key, key_len * 8, aes_key);
	CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
#ifdef AES_CTR_ASM
	return (ctr128_f)AES_ctr32_encrypt;
#else
	return NULL;
#endif
}
817
818static int
819aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
820 const unsigned char *iv, int enc)
821{
822 EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
823
824 if (!iv && !key)
825 return 1;
826 if (key) {
827 gctx->ctr = aes_gcm_set_key(&gctx->ks, &gctx->gcm,
828 key, ctx->key_len);
831 829
832 /* If we have an iv can set it directly, otherwise use 830 /* If we have an iv can set it directly, otherwise use
833 * saved IV. 831 * saved IV.
@@ -1263,4 +1261,186 @@ BLOCK_CIPHER_custom(NID_aes, 192, 1,12, ccm, CCM,
1263BLOCK_CIPHER_custom(NID_aes, 256, 1,12, ccm, CCM, 1261BLOCK_CIPHER_custom(NID_aes, 256, 1,12, ccm, CCM,
1264 EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS) 1262 EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
1265 1263
/* Full AES-GCM tag size in bytes; also the default and the maximum. */
#define EVP_AEAD_AES_GCM_TAG_LEN 16

/* State stored in EVP_AEAD_CTX->aead_state for the AES-GCM AEAD. */
struct aead_aes_gcm_ctx {
	union {
		double align;	/* forces suitable alignment for the key schedule */
		AES_KEY ks;
	} ks;			/* AES key schedule */
	GCM128_CONTEXT gcm;	/* GCM context initialised with the key */
	ctr128_f ctr;		/* optimised ctr32 routine, or NULL for the generic path */
	unsigned char tag_len;	/* tag length in bytes, <= EVP_AEAD_AES_GCM_TAG_LEN */
};
1275
/*
 * Initialise an EVP_AEAD_CTX for AES-GCM.
 *
 * key_len selects AES-128 or AES-256 (192-bit keys are deliberately
 * rejected).  tag_len is the authentication tag length in bytes;
 * EVP_AEAD_DEFAULT_TAG_LENGTH selects the full 16-byte tag.
 * Returns 1 on success, 0 on error (bad key length, oversized tag,
 * or allocation failure).  On success ctx->aead_state owns the
 * allocated state; aead_aes_gcm_cleanup() frees it.
 */
static int
aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const unsigned char *key, size_t key_len,
    size_t tag_len)
{
	struct aead_aes_gcm_ctx *gcm_ctx;
	const size_t key_bits = key_len * 8;

	if (key_bits != 128 && key_bits != 256) {
		EVPerr(EVP_F_AEAD_AES_GCM_INIT, EVP_R_BAD_KEY_LENGTH);
		return 0; /* EVP_AEAD_CTX_init should catch this. */
	}

	if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH)
		tag_len = EVP_AEAD_AES_GCM_TAG_LEN;

	if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
		EVPerr(EVP_F_AEAD_AES_GCM_INIT, EVP_R_TAG_TOO_LARGE);
		return 0;
	}

	gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
	if (gcm_ctx == NULL)
		return 0;

#ifdef AESNI_CAPABLE
	if (AESNI_CAPABLE) {
		/* AES-NI: hardware key schedule and ctr32 bulk routine. */
		aesni_set_encrypt_key(key, key_bits, &gcm_ctx->ks.ks);
		CRYPTO_gcm128_init(&gcm_ctx->gcm, &gcm_ctx->ks.ks,
		    (block128_f)aesni_encrypt);
		gcm_ctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
	} else
#endif
	{
		/* Fall back to the shared implementation selector. */
		gcm_ctx->ctr = aes_gcm_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm,
		    key, key_len);
	}
	gcm_ctx->tag_len = tag_len;
	ctx->aead_state = gcm_ctx;

	return 1;
}
1317
1318static void
1319aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx)
1320{
1321 struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1322
1323 OPENSSL_free(gcm_ctx);
1324}
1325
1326static ssize_t
1327aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, unsigned char *out,
1328 size_t max_out_len, const unsigned char *nonce, size_t nonce_len,
1329 const unsigned char *in, size_t in_len, const unsigned char *ad,
1330 size_t ad_len)
1331{
1332 size_t bulk = 0;
1333 const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1334 GCM128_CONTEXT gcm;
1335
1336 if (max_out_len < in_len + gcm_ctx->tag_len) {
1337 EVPerr(EVP_F_AEAD_AES_GCM_SEAL, EVP_R_BUFFER_TOO_SMALL);
1338 return -1;
1339 }
1340
1341 memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1342 CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);
1343
1344 if (ad_len > 0 && CRYPTO_gcm128_aad(&gcm, ad, ad_len))
1345 return -1;
1346
1347 if (gcm_ctx->ctr) {
1348 if (CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk,
1349 in_len - bulk, gcm_ctx->ctr))
1350 return -1;
1351 } else {
1352 if (CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk,
1353 in_len - bulk))
1354 return -1;
1355 }
1356
1357 CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
1358 return in_len + gcm_ctx->tag_len;
1359}
1360
/*
 * Open (verify and decrypt) an AES-GCM sealed message: in holds the
 * ciphertext followed by a gcm_ctx->tag_len byte tag.  On success the
 * plaintext (in_len - tag_len bytes) is written to out and its length
 * returned; on any failure -1 is returned.
 */
static ssize_t
aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, unsigned char *out,
    size_t max_out_len, const unsigned char *nonce, size_t nonce_len,
    const unsigned char *in, size_t in_len, const unsigned char *ad,
    size_t ad_len)
{
	size_t bulk = 0;	/* no bulk fast path; kept for upstream parity */
	const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
	unsigned char tag[EVP_AEAD_AES_GCM_TAG_LEN];
	size_t out_len;
	GCM128_CONTEXT gcm;

	/* The input must at least contain the tag. */
	if (in_len < gcm_ctx->tag_len) {
		EVPerr(EVP_F_AEAD_AES_GCM_OPEN, EVP_R_BAD_DECRYPT);
		return -1;
	}

	out_len = in_len - gcm_ctx->tag_len;

	if (max_out_len < out_len) {
		EVPerr(EVP_F_AEAD_AES_GCM_OPEN, EVP_R_BUFFER_TOO_SMALL);
		return -1;
	}

	/* Work on a stack copy so the per-key context stays unmodified. */
	memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
	CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

	/* NOTE(review): seal only passes AAD when ad_len > 0; calling
	 * CRYPTO_gcm128_aad() with ad_len == 0 here is presumably a
	 * harmless no-op — confirm against gcm128.c. */
	if (CRYPTO_gcm128_aad(&gcm, ad, ad_len))
		return -1;

	if (gcm_ctx->ctr) {
		if (CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
		    in_len-bulk-gcm_ctx->tag_len, gcm_ctx->ctr))
			return -1;
	} else {
		if (CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
		    in_len - bulk - gcm_ctx->tag_len))
			return -1;
	}

	/* Recompute the expected tag and compare in constant time. */
	CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
	if (CRYPTO_memcmp(tag, in + out_len, gcm_ctx->tag_len) != 0) {
		EVPerr(EVP_F_AEAD_AES_GCM_OPEN, EVP_R_BAD_DECRYPT);
		return -1;
	}

	return out_len;
}
1409
/* AEAD method table for AES-128-GCM. */
static const EVP_AEAD aead_aes_128_gcm = {
	.key_len = 16,		/* AES-128 key, in bytes */
	.nonce_len = 12,	/* 96-bit GCM nonce */
	.overhead = EVP_AEAD_AES_GCM_TAG_LEN,
	.max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN,

	.init = aead_aes_gcm_init,
	.cleanup = aead_aes_gcm_cleanup,
	.seal = aead_aes_gcm_seal,
	.open = aead_aes_gcm_open,
};
1421
/* AEAD method table for AES-256-GCM. */
static const EVP_AEAD aead_aes_256_gcm = {
	.key_len = 32,		/* AES-256 key, in bytes */
	.nonce_len = 12,	/* 96-bit GCM nonce */
	.overhead = EVP_AEAD_AES_GCM_TAG_LEN,
	.max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN,

	.init = aead_aes_gcm_init,
	.cleanup = aead_aes_gcm_cleanup,
	.seal = aead_aes_gcm_seal,
	.open = aead_aes_gcm_open,
};
1433
/* Return the AEAD method table for AES-128-GCM. */
const EVP_AEAD *
EVP_aead_aes_128_gcm(void)
{
	return &aead_aes_128_gcm;
}
1439
/* Return the AEAD method table for AES-256-GCM. */
const EVP_AEAD *
EVP_aead_aes_256_gcm(void)
{
	return &aead_aes_256_gcm;
}
1445
1266#endif 1446#endif