| author | miod <> | 2014-07-09 16:06:14 +0000 |
|---|---|---|
| committer | miod <> | 2014-07-09 16:06:14 +0000 |
| commit | eb3e7d7ffe444652fe6f90808f95e08bc99cf5ae (patch) | |
| tree | 49de6ec1ff73debc360d8605d09d765a8e604a22 /src/lib/libcrypto/modes/gcm128.c | |
| parent | 241b6a38fd4cff90e8e18e7c8dd2e6371f86a8e9 (diff) | |
| download | openbsd-eb3e7d7ffe444652fe6f90808f95e08bc99cf5ae.tar.gz openbsd-eb3e7d7ffe444652fe6f90808f95e08bc99cf5ae.tar.bz2 openbsd-eb3e7d7ffe444652fe6f90808f95e08bc99cf5ae.zip | |
Remove leading underscore from _BYTE_ORDER and _{LITTLE,BIG}_ENDIAN, to be
more friendly to systems where the underscore flavours may be defined as empty.
Found the hard way by bcook@; joint brainstorm with bcook, beck and guenther.
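The macros touched here are compared in ordinary C `if` statements, not in `#if` directives. On a system whose headers define the underscore spellings as empty, `_BYTE_ORDER == _LITTLE_ENDIAN` expands to `if ( == )` and the file no longer compiles; the unprefixed `BYTE_ORDER`, `LITTLE_ENDIAN` and `BIG_ENDIAN` names are defined to distinct numeric values (traditionally 1234 and 4321 on BSD-derived systems), so the comparison stays meaningful. The sketch below shows the post-change idiom in isolation; it is not an excerpt from gcm128.c, and the header choice and helper name (`<machine/endian.h>`, `host_to_be32`) are illustrative assumptions.

```c
/*
 * Standalone sketch of the idiom the diff converges on; NOT an excerpt
 * from gcm128.c.  Assumes a BSD-style <machine/endian.h> that defines
 * BYTE_ORDER, LITTLE_ENDIAN and BIG_ENDIAN to distinct numeric values,
 * as OpenBSD's does.
 */
#include <machine/endian.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Illustrative helper (hypothetical name): put a 32-bit counter into
 * big-endian (wire) order, as the GCM code does for ctx->Yi.d[3].
 */
static uint32_t
host_to_be32(uint32_t v)
{
	/*
	 * The comparison is a constant expression: the compiler folds it
	 * away, but unlike the underscore-prefixed macros it can never
	 * degenerate into "if ( == )" when a macro expands to nothing.
	 */
	if (BYTE_ORDER == LITTLE_ENDIAN)
		return ((v & 0xff) << 24) | ((v & 0xff00) << 8) |
		    ((v >> 8) & 0xff00) | (v >> 24);
	return v;		/* big-endian host: already in wire order */
}

int
main(void)
{
	printf("host is %s-endian\n",
	    BYTE_ORDER == LITTLE_ENDIAN ? "little" : "big");
	printf("0x11223344 -> 0x%08x (swapped only on little-endian hosts)\n",
	    host_to_be32(0x11223344));
	return 0;
}
```

Because any optimizing compiler folds the constant comparison and discards the dead branch, this form costs nothing at run time while avoiding a thicket of preprocessor conditionals around every byte-order-dependent statement.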
Diffstat (limited to '')
| -rw-r--r-- | src/lib/libcrypto/modes/gcm128.c | 64 |
1 file changed, 32 insertions, 32 deletions
diff --git a/src/lib/libcrypto/modes/gcm128.c b/src/lib/libcrypto/modes/gcm128.c
index def8cca496..5b600517be 100644
--- a/src/lib/libcrypto/modes/gcm128.c
+++ b/src/lib/libcrypto/modes/gcm128.c
@@ -1,4 +1,4 @@
| 1 | /* $OpenBSD: gcm128.c,v 1.9 2014/06/27 06:07:35 deraadt Exp $ */ | 1 | /* $OpenBSD: gcm128.c,v 1.10 2014/07/09 16:06:13 miod Exp $ */ |
| 2 | /* ==================================================================== | 2 | /* ==================================================================== |
| 3 | * Copyright (c) 2010 The OpenSSL Project. All rights reserved. | 3 | * Copyright (c) 2010 The OpenSSL Project. All rights reserved. |
| 4 | * | 4 | * |
@@ -231,7 +231,7 @@ static void gcm_gmult_8bit(u64 Xi[2], const u128 Htable[256])
| 231 | Z.hi ^= (u64)rem_8bit[rem]<<32; | 231 | Z.hi ^= (u64)rem_8bit[rem]<<32; |
| 232 | } | 232 | } |
| 233 | 233 | ||
| 234 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 234 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 235 | #ifdef BSWAP8 | 235 | #ifdef BSWAP8 |
| 236 | Xi[0] = BSWAP8(Z.hi); | 236 | Xi[0] = BSWAP8(Z.hi); |
| 237 | Xi[1] = BSWAP8(Z.lo); | 237 | Xi[1] = BSWAP8(Z.lo); |
@@ -308,7 +308,7 @@ static void gcm_init_4bit(u128 Htable[16], u64 H[2])
| 308 | { | 308 | { |
| 309 | int j; | 309 | int j; |
| 310 | 310 | ||
| 311 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 311 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 312 | for (j=0;j<16;++j) { | 312 | for (j=0;j<16;++j) { |
| 313 | V = Htable[j]; | 313 | V = Htable[j]; |
| 314 | Htable[j].hi = V.lo; | 314 | Htable[j].hi = V.lo; |
@@ -374,7 +374,7 @@ static void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16])
| 374 | Z.lo ^= Htable[nlo].lo; | 374 | Z.lo ^= Htable[nlo].lo; |
| 375 | } | 375 | } |
| 376 | 376 | ||
| 377 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 377 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 378 | #ifdef BSWAP8 | 378 | #ifdef BSWAP8 |
| 379 | Xi[0] = BSWAP8(Z.hi); | 379 | Xi[0] = BSWAP8(Z.hi); |
| 380 | Xi[1] = BSWAP8(Z.lo); | 380 | Xi[1] = BSWAP8(Z.lo); |
@@ -543,7 +543,7 @@ static void gcm_ghash_4bit(u64 Xi[2],const u128 Htable[16],
| 543 | Z.hi ^= ((u64)rem_8bit[rem<<4])<<48; | 543 | Z.hi ^= ((u64)rem_8bit[rem<<4])<<48; |
| 544 | #endif | 544 | #endif |
| 545 | 545 | ||
| 546 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 546 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 547 | #ifdef BSWAP8 | 547 | #ifdef BSWAP8 |
| 548 | Xi[0] = BSWAP8(Z.hi); | 548 | Xi[0] = BSWAP8(Z.hi); |
| 549 | Xi[1] = BSWAP8(Z.lo); | 549 | Xi[1] = BSWAP8(Z.lo); |
@@ -590,7 +590,7 @@ static void gcm_gmult_1bit(u64 Xi[2],const u64 H[2])
| 590 | V.lo = H[1]; | 590 | V.lo = H[1]; |
| 591 | 591 | ||
| 592 | for (j=0; j<16/sizeof(long); ++j) { | 592 | for (j=0; j<16/sizeof(long); ++j) { |
| 593 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 593 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 594 | if (sizeof(long)==8) { | 594 | if (sizeof(long)==8) { |
| 595 | #ifdef BSWAP8 | 595 | #ifdef BSWAP8 |
| 596 | X = (long)(BSWAP8(xi[j])); | 596 | X = (long)(BSWAP8(xi[j])); |
@@ -616,7 +616,7 @@ static void gcm_gmult_1bit(u64 Xi[2],const u64 H[2])
| 616 | } | 616 | } |
| 617 | } | 617 | } |
| 618 | 618 | ||
| 619 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 619 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 620 | #ifdef BSWAP8 | 620 | #ifdef BSWAP8 |
| 621 | Xi[0] = BSWAP8(Z.hi); | 621 | Xi[0] = BSWAP8(Z.hi); |
| 622 | Xi[1] = BSWAP8(Z.lo); | 622 | Xi[1] = BSWAP8(Z.lo); |
@@ -687,7 +687,7 @@ void CRYPTO_gcm128_init(GCM128_CONTEXT *ctx,void *key,block128_f block)
| 687 | 687 | ||
| 688 | (*block)(ctx->H.c,ctx->H.c,key); | 688 | (*block)(ctx->H.c,ctx->H.c,key); |
| 689 | 689 | ||
| 690 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 690 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 691 | /* H is stored in host byte order */ | 691 | /* H is stored in host byte order */ |
| 692 | #ifdef BSWAP8 | 692 | #ifdef BSWAP8 |
| 693 | ctx->H.u[0] = BSWAP8(ctx->H.u[0]); | 693 | ctx->H.u[0] = BSWAP8(ctx->H.u[0]); |
@@ -783,7 +783,7 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx,const unsigned char *iv,size_t len)
| 783 | GCM_MUL(ctx,Yi); | 783 | GCM_MUL(ctx,Yi); |
| 784 | } | 784 | } |
| 785 | len0 <<= 3; | 785 | len0 <<= 3; |
| 786 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 786 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 787 | #ifdef BSWAP8 | 787 | #ifdef BSWAP8 |
| 788 | ctx->Yi.u[1] ^= BSWAP8(len0); | 788 | ctx->Yi.u[1] ^= BSWAP8(len0); |
| 789 | #else | 789 | #else |
@@ -802,7 +802,7 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx,const unsigned char *iv,size_t len)
| 802 | 802 | ||
| 803 | GCM_MUL(ctx,Yi); | 803 | GCM_MUL(ctx,Yi); |
| 804 | 804 | ||
| 805 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 805 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 806 | #ifdef BSWAP4 | 806 | #ifdef BSWAP4 |
| 807 | ctr = BSWAP4(ctx->Yi.d[3]); | 807 | ctr = BSWAP4(ctx->Yi.d[3]); |
| 808 | #else | 808 | #else |
@@ -814,7 +814,7 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx,const unsigned char *iv,size_t len)
| 814 | 814 | ||
| 815 | (*ctx->block)(ctx->Yi.c,ctx->EK0.c,ctx->key); | 815 | (*ctx->block)(ctx->Yi.c,ctx->EK0.c,ctx->key); |
| 816 | ++ctr; | 816 | ++ctr; |
| 817 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 817 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 818 | #ifdef BSWAP4 | 818 | #ifdef BSWAP4 |
| 819 | ctx->Yi.d[3] = BSWAP4(ctr); | 819 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 820 | #else | 820 | #else |
@@ -912,7 +912,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
| 912 | ctx->ares = 0; | 912 | ctx->ares = 0; |
| 913 | } | 913 | } |
| 914 | 914 | ||
| 915 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 915 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 916 | #ifdef BSWAP4 | 916 | #ifdef BSWAP4 |
| 917 | ctr = BSWAP4(ctx->Yi.d[3]); | 917 | ctr = BSWAP4(ctx->Yi.d[3]); |
| 918 | #else | 918 | #else |
@@ -950,7 +950,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
| 950 | 950 | ||
| 951 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 951 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 952 | ++ctr; | 952 | ++ctr; |
| 953 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 953 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 954 | #ifdef BSWAP4 | 954 | #ifdef BSWAP4 |
| 955 | ctx->Yi.d[3] = BSWAP4(ctr); | 955 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 956 | #else | 956 | #else |
@@ -976,7 +976,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
| 976 | 976 | ||
| 977 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 977 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 978 | ++ctr; | 978 | ++ctr; |
| 979 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 979 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 980 | #ifdef BSWAP4 | 980 | #ifdef BSWAP4 |
| 981 | ctx->Yi.d[3] = BSWAP4(ctr); | 981 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 982 | #else | 982 | #else |
@@ -999,7 +999,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
| 999 | 999 | ||
| 1000 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1000 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1001 | ++ctr; | 1001 | ++ctr; |
| 1002 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1002 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1003 | #ifdef BSWAP4 | 1003 | #ifdef BSWAP4 |
| 1004 | ctx->Yi.d[3] = BSWAP4(ctr); | 1004 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1005 | #else | 1005 | #else |
@@ -1019,7 +1019,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
| 1019 | if (len) { | 1019 | if (len) { |
| 1020 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1020 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1021 | ++ctr; | 1021 | ++ctr; |
| 1022 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1022 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1023 | #ifdef BSWAP4 | 1023 | #ifdef BSWAP4 |
| 1024 | ctx->Yi.d[3] = BSWAP4(ctr); | 1024 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1025 | #else | 1025 | #else |
@@ -1041,7 +1041,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
| 1041 | if (n==0) { | 1041 | if (n==0) { |
| 1042 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1042 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1043 | ++ctr; | 1043 | ++ctr; |
| 1044 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1044 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1045 | #ifdef BSWAP4 | 1045 | #ifdef BSWAP4 |
| 1046 | ctx->Yi.d[3] = BSWAP4(ctr); | 1046 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1047 | #else | 1047 | #else |
@@ -1088,7 +1088,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
| 1088 | ctx->ares = 0; | 1088 | ctx->ares = 0; |
| 1089 | } | 1089 | } |
| 1090 | 1090 | ||
| 1091 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1091 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1092 | #ifdef BSWAP4 | 1092 | #ifdef BSWAP4 |
| 1093 | ctr = BSWAP4(ctx->Yi.d[3]); | 1093 | ctr = BSWAP4(ctx->Yi.d[3]); |
| 1094 | #else | 1094 | #else |
@@ -1129,7 +1129,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
| 1129 | 1129 | ||
| 1130 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1130 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1131 | ++ctr; | 1131 | ++ctr; |
| 1132 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1132 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1133 | #ifdef BSWAP4 | 1133 | #ifdef BSWAP4 |
| 1134 | ctx->Yi.d[3] = BSWAP4(ctr); | 1134 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1135 | #else | 1135 | #else |
@@ -1153,7 +1153,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
| 1153 | 1153 | ||
| 1154 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1154 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1155 | ++ctr; | 1155 | ++ctr; |
| 1156 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1156 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1157 | #ifdef BSWAP4 | 1157 | #ifdef BSWAP4 |
| 1158 | ctx->Yi.d[3] = BSWAP4(ctr); | 1158 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1159 | #else | 1159 | #else |
@@ -1175,7 +1175,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
| 1175 | 1175 | ||
| 1176 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1176 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1177 | ++ctr; | 1177 | ++ctr; |
| 1178 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1178 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1179 | #ifdef BSWAP4 | 1179 | #ifdef BSWAP4 |
| 1180 | ctx->Yi.d[3] = BSWAP4(ctr); | 1180 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1181 | #else | 1181 | #else |
@@ -1197,7 +1197,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
| 1197 | if (len) { | 1197 | if (len) { |
| 1198 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1198 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1199 | ++ctr; | 1199 | ++ctr; |
| 1200 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1200 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1201 | #ifdef BSWAP4 | 1201 | #ifdef BSWAP4 |
| 1202 | ctx->Yi.d[3] = BSWAP4(ctr); | 1202 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1203 | #else | 1203 | #else |
@@ -1222,7 +1222,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
| 1222 | if (n==0) { | 1222 | if (n==0) { |
| 1223 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1223 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1224 | ++ctr; | 1224 | ++ctr; |
| 1225 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1225 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1226 | #ifdef BSWAP4 | 1226 | #ifdef BSWAP4 |
| 1227 | ctx->Yi.d[3] = BSWAP4(ctr); | 1227 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1228 | #else | 1228 | #else |
@@ -1270,7 +1270,7 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
| 1270 | ctx->ares = 0; | 1270 | ctx->ares = 0; |
| 1271 | } | 1271 | } |
| 1272 | 1272 | ||
| 1273 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1273 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1274 | #ifdef BSWAP4 | 1274 | #ifdef BSWAP4 |
| 1275 | ctr = BSWAP4(ctx->Yi.d[3]); | 1275 | ctr = BSWAP4(ctx->Yi.d[3]); |
| 1276 | #else | 1276 | #else |
@@ -1296,7 +1296,7 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
| 1296 | while (len>=GHASH_CHUNK) { | 1296 | while (len>=GHASH_CHUNK) { |
| 1297 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); | 1297 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); |
| 1298 | ctr += GHASH_CHUNK/16; | 1298 | ctr += GHASH_CHUNK/16; |
| 1299 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1299 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1300 | #ifdef BSWAP4 | 1300 | #ifdef BSWAP4 |
| 1301 | ctx->Yi.d[3] = BSWAP4(ctr); | 1301 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1302 | #else | 1302 | #else |
@@ -1315,7 +1315,7 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
| 1315 | 1315 | ||
| 1316 | (*stream)(in,out,j,key,ctx->Yi.c); | 1316 | (*stream)(in,out,j,key,ctx->Yi.c); |
| 1317 | ctr += (unsigned int)j; | 1317 | ctr += (unsigned int)j; |
| 1318 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1318 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1319 | #ifdef BSWAP4 | 1319 | #ifdef BSWAP4 |
| 1320 | ctx->Yi.d[3] = BSWAP4(ctr); | 1320 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1321 | #else | 1321 | #else |
@@ -1339,7 +1339,7 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
| 1339 | if (len) { | 1339 | if (len) { |
| 1340 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); | 1340 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1341 | ++ctr; | 1341 | ++ctr; |
| 1342 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1342 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1343 | #ifdef BSWAP4 | 1343 | #ifdef BSWAP4 |
| 1344 | ctx->Yi.d[3] = BSWAP4(ctr); | 1344 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1345 | #else | 1345 | #else |
@@ -1384,7 +1384,7 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
| 1384 | ctx->ares = 0; | 1384 | ctx->ares = 0; |
| 1385 | } | 1385 | } |
| 1386 | 1386 | ||
| 1387 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1387 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1388 | #ifdef BSWAP4 | 1388 | #ifdef BSWAP4 |
| 1389 | ctr = BSWAP4(ctx->Yi.d[3]); | 1389 | ctr = BSWAP4(ctx->Yi.d[3]); |
| 1390 | #else | 1390 | #else |
@@ -1413,7 +1413,7 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
| 1413 | GHASH(ctx,in,GHASH_CHUNK); | 1413 | GHASH(ctx,in,GHASH_CHUNK); |
| 1414 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); | 1414 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); |
| 1415 | ctr += GHASH_CHUNK/16; | 1415 | ctr += GHASH_CHUNK/16; |
| 1416 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1416 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1417 | #ifdef BSWAP4 | 1417 | #ifdef BSWAP4 |
| 1418 | ctx->Yi.d[3] = BSWAP4(ctr); | 1418 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1419 | #else | 1419 | #else |
@@ -1443,7 +1443,7 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
| 1443 | #endif | 1443 | #endif |
| 1444 | (*stream)(in,out,j,key,ctx->Yi.c); | 1444 | (*stream)(in,out,j,key,ctx->Yi.c); |
| 1445 | ctr += (unsigned int)j; | 1445 | ctr += (unsigned int)j; |
| 1446 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1446 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1447 | #ifdef BSWAP4 | 1447 | #ifdef BSWAP4 |
| 1448 | ctx->Yi.d[3] = BSWAP4(ctr); | 1448 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1449 | #else | 1449 | #else |
@@ -1458,7 +1458,7 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
| 1458 | if (len) { | 1458 | if (len) { |
| 1459 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); | 1459 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1460 | ++ctr; | 1460 | ++ctr; |
| 1461 | if (_BYTE_ORDER == _LITTLE_ENDIAN) | 1461 | if (BYTE_ORDER == LITTLE_ENDIAN) |
| 1462 | #ifdef BSWAP4 | 1462 | #ifdef BSWAP4 |
| 1463 | ctx->Yi.d[3] = BSWAP4(ctr); | 1463 | ctx->Yi.d[3] = BSWAP4(ctr); |
| 1464 | #else | 1464 | #else |
@@ -1490,7 +1490,7 @@ int CRYPTO_gcm128_finish(GCM128_CONTEXT *ctx,const unsigned char *tag,
| 1490 | if (ctx->mres || ctx->ares) | 1490 | if (ctx->mres || ctx->ares) |
| 1491 | GCM_MUL(ctx,Xi); | 1491 | GCM_MUL(ctx,Xi); |
| 1492 | 1492 | ||
| 1493 | if (_BYTE_ORDER == _LITTLE_ENDIAN) { | 1493 | if (BYTE_ORDER == LITTLE_ENDIAN) { |
| 1494 | #ifdef BSWAP8 | 1494 | #ifdef BSWAP8 |
| 1495 | alen = BSWAP8(alen); | 1495 | alen = BSWAP8(alen); |
| 1496 | clen = BSWAP8(clen); | 1496 | clen = BSWAP8(clen); |
