summary refs log tree commit diff
diff options
context:
space:
mode:
authorjsing <>2023-08-10 07:18:43 +0000
committerjsing <>2023-08-10 07:18:43 +0000
commit6fffbab1235c36f552a36cd026ebffdfdc63bd59 (patch)
tree459bf932b2ad162bed12e794b0a1a0303f3a757e
parent5ccf52b751e2b7dd5a5c0593f12b501074180fff (diff)
downloadopenbsd-6fffbab1235c36f552a36cd026ebffdfdc63bd59.tar.gz
openbsd-6fffbab1235c36f552a36cd026ebffdfdc63bd59.tar.bz2
openbsd-6fffbab1235c36f552a36cd026ebffdfdc63bd59.zip
Improve byte order handling in gcm128.
Replace a pile of byte order handling mess with htobe*() and be*toh(). ok tb@
-rw-r--r-- src/lib/libcrypto/modes/gcm128.c 373
1 file changed, 44 insertions, 329 deletions
diff --git a/src/lib/libcrypto/modes/gcm128.c b/src/lib/libcrypto/modes/gcm128.c
index c4fc5cb94f..cbda8ad097 100644
--- a/src/lib/libcrypto/modes/gcm128.c
+++ b/src/lib/libcrypto/modes/gcm128.c
@@ -1,4 +1,4 @@
1/* $OpenBSD: gcm128.c,v 1.25 2023/07/08 14:56:54 beck Exp $ */ 1/* $OpenBSD: gcm128.c,v 1.26 2023/08/10 07:18:43 jsing Exp $ */
2/* ==================================================================== 2/* ====================================================================
3 * Copyright (c) 2010 The OpenSSL Project. All rights reserved. 3 * Copyright (c) 2010 The OpenSSL Project. All rights reserved.
4 * 4 *
@@ -64,8 +64,6 @@
64/* redefine, because alignment is ensured */ 64/* redefine, because alignment is ensured */
65#undef GETU32 65#undef GETU32
66#define GETU32(p) BSWAP4(*(const u32 *)(p)) 66#define GETU32(p) BSWAP4(*(const u32 *)(p))
67#undef PUTU32
68#define PUTU32(p,v) *(u32 *)(p) = BSWAP4(v)
69#endif 67#endif
70 68
71#define PACK(s) ((size_t)(s)<<(sizeof(size_t)*8-16)) 69#define PACK(s) ((size_t)(s)<<(sizeof(size_t)*8-16))
@@ -234,26 +232,8 @@ gcm_gmult_8bit(u64 Xi[2], const u128 Htable[256])
234#endif 232#endif
235 } 233 }
236 234
237#if BYTE_ORDER == LITTLE_ENDIAN 235 Xi[0] = htobe64(Z.hi);
238#ifdef BSWAP8 236 Xi[1] = htobe64(Z.lo);
239 Xi[0] = BSWAP8(Z.hi);
240 Xi[1] = BSWAP8(Z.lo);
241#else
242 u8 *p = (u8 *)Xi;
243 u32 v;
244 v = (u32)(Z.hi >> 32);
245 PUTU32(p, v);
246 v = (u32)(Z.hi);
247 PUTU32(p + 4, v);
248 v = (u32)(Z.lo >> 32);
249 PUTU32(p + 8, v);
250 v = (u32)(Z.lo);
251 PUTU32(p + 12, v);
252#endif
253#else /* BIG_ENDIAN */
254 Xi[0] = Z.hi;
255 Xi[1] = Z.lo;
256#endif
257} 237}
258#define GCM_MUL(ctx,Xi) gcm_gmult_8bit(ctx->Xi.u,ctx->Htable) 238#define GCM_MUL(ctx,Xi) gcm_gmult_8bit(ctx->Xi.u,ctx->Htable)
259 239
@@ -389,26 +369,8 @@ gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16])
389 Z.lo ^= Htable[nlo].lo; 369 Z.lo ^= Htable[nlo].lo;
390 } 370 }
391 371
392#if BYTE_ORDER == LITTLE_ENDIAN 372 Xi[0] = htobe64(Z.hi);
393#ifdef BSWAP8 373 Xi[1] = htobe64(Z.lo);
394 Xi[0] = BSWAP8(Z.hi);
395 Xi[1] = BSWAP8(Z.lo);
396#else
397 u8 *p = (u8 *)Xi;
398 u32 v;
399 v = (u32)(Z.hi >> 32);
400 PUTU32(p, v);
401 v = (u32)(Z.hi);
402 PUTU32(p + 4, v);
403 v = (u32)(Z.lo >> 32);
404 PUTU32(p + 8, v);
405 v = (u32)(Z.lo);
406 PUTU32(p + 12, v);
407#endif
408#else /* BIG_ENDIAN */
409 Xi[0] = Z.hi;
410 Xi[1] = Z.lo;
411#endif
412} 374}
413 375
414#if !defined(OPENSSL_SMALL_FOOTPRINT) 376#if !defined(OPENSSL_SMALL_FOOTPRINT)
@@ -563,26 +525,8 @@ gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16],
563 Z.hi ^= ((u64)rem_8bit[rem << 4]) << 48; 525 Z.hi ^= ((u64)rem_8bit[rem << 4]) << 48;
564#endif 526#endif
565 527
566#if BYTE_ORDER == LITTLE_ENDIAN 528 Xi[0] = htobe64(Z.hi);
567#ifdef BSWAP8 529 Xi[1] = htobe64(Z.lo);
568 Xi[0] = BSWAP8(Z.hi);
569 Xi[1] = BSWAP8(Z.lo);
570#else
571 u8 *p = (u8 *)Xi;
572 u32 v;
573 v = (u32)(Z.hi >> 32);
574 PUTU32(p, v);
575 v = (u32)(Z.hi);
576 PUTU32(p + 4, v);
577 v = (u32)(Z.lo >> 32);
578 PUTU32(p + 8, v);
579 v = (u32)(Z.lo);
580 PUTU32(p + 12, v);
581#endif
582#else /* BIG_ENDIAN */
583 Xi[0] = Z.hi;
584 Xi[1] = Z.lo;
585#endif
586 } while (inp += 16, len -= 16); 530 } while (inp += 16, len -= 16);
587} 531}
588#endif 532#endif
@@ -640,26 +584,8 @@ gcm_gmult_1bit(u64 Xi[2], const u64 H[2])
640 } 584 }
641 } 585 }
642 586
643#if BYTE_ORDER == LITTLE_ENDIAN 587 Xi[0] = htobe64(Z.hi);
644#ifdef BSWAP8 588 Xi[1] = htobe64(Z.lo);
645 Xi[0] = BSWAP8(Z.hi);
646 Xi[1] = BSWAP8(Z.lo);
647#else
648 u8 *p = (u8 *)Xi;
649 u32 v;
650 v = (u32)(Z.hi >> 32);
651 PUTU32(p, v);
652 v = (u32)(Z.hi);
653 PUTU32(p + 4, v);
654 v = (u32)(Z.lo >> 32);
655 PUTU32(p + 8, v);
656 v = (u32)(Z.lo);
657 PUTU32(p + 12, v);
658#endif
659#else /* BIG_ENDIAN */
660 Xi[0] = Z.hi;
661 Xi[1] = Z.lo;
662#endif
663} 589}
664#define GCM_MUL(ctx,Xi) gcm_gmult_1bit(ctx->Xi.u,ctx->H.u) 590#define GCM_MUL(ctx,Xi) gcm_gmult_1bit(ctx->Xi.u,ctx->H.u)
665 591
@@ -724,20 +650,9 @@ CRYPTO_gcm128_init(GCM128_CONTEXT *ctx, void *key, block128_f block)
724 650
725 (*block)(ctx->H.c, ctx->H.c, key); 651 (*block)(ctx->H.c, ctx->H.c, key);
726 652
727#if BYTE_ORDER == LITTLE_ENDIAN
728 /* H is stored in host byte order */ 653 /* H is stored in host byte order */
729#ifdef BSWAP8 654 ctx->H.u[0] = be64toh(ctx->H.u[0]);
730 ctx->H.u[0] = BSWAP8(ctx->H.u[0]); 655 ctx->H.u[1] = be64toh(ctx->H.u[1]);
731 ctx->H.u[1] = BSWAP8(ctx->H.u[1]);
732#else
733 u8 *p = ctx->H.c;
734 u64 hi, lo;
735 hi = (u64)GETU32(p) << 32|GETU32(p + 4);
736 lo = (u64)GETU32(p + 8) << 32|GETU32(p + 12);
737 ctx->H.u[0] = hi;
738 ctx->H.u[1] = lo;
739#endif
740#endif
741 656
742#if TABLE_BITS==8 657#if TABLE_BITS==8
743 gcm_init_8bit(ctx->Htable, ctx->H.u); 658 gcm_init_8bit(ctx->Htable, ctx->H.u);
@@ -824,47 +739,16 @@ CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx, const unsigned char *iv, size_t len)
824 GCM_MUL(ctx, Yi); 739 GCM_MUL(ctx, Yi);
825 } 740 }
826 len0 <<= 3; 741 len0 <<= 3;
827#if BYTE_ORDER == LITTLE_ENDIAN 742 ctx->Yi.u[1] ^= htobe64(len0);
828#ifdef BSWAP8
829 ctx->Yi.u[1] ^= BSWAP8(len0);
830#else
831 ctx->Yi.c[8] ^= (u8)(len0 >> 56);
832 ctx->Yi.c[9] ^= (u8)(len0 >> 48);
833 ctx->Yi.c[10] ^= (u8)(len0 >> 40);
834 ctx->Yi.c[11] ^= (u8)(len0 >> 32);
835 ctx->Yi.c[12] ^= (u8)(len0 >> 24);
836 ctx->Yi.c[13] ^= (u8)(len0 >> 16);
837 ctx->Yi.c[14] ^= (u8)(len0 >> 8);
838 ctx->Yi.c[15] ^= (u8)(len0);
839#endif
840#else /* BIG_ENDIAN */
841 ctx->Yi.u[1] ^= len0;
842#endif
843 743
844 GCM_MUL(ctx, Yi); 744 GCM_MUL(ctx, Yi);
845 745
846#if BYTE_ORDER == LITTLE_ENDIAN 746 ctr = be32toh(ctx->Yi.d[3]);
847#ifdef BSWAP4
848 ctr = BSWAP4(ctx->Yi.d[3]);
849#else
850 ctr = GETU32(ctx->Yi.c + 12);
851#endif
852#else /* BIG_ENDIAN */
853 ctr = ctx->Yi.d[3];
854#endif
855 } 747 }
856 748
857 (*ctx->block)(ctx->Yi.c, ctx->EK0.c, ctx->key); 749 (*ctx->block)(ctx->Yi.c, ctx->EK0.c, ctx->key);
858 ++ctr; 750 ++ctr;
859#if BYTE_ORDER == LITTLE_ENDIAN 751 ctx->Yi.d[3] = htobe32(ctr);
860#ifdef BSWAP4
861 ctx->Yi.d[3] = BSWAP4(ctr);
862#else
863 PUTU32(ctx->Yi.c + 12, ctr);
864#endif
865#else /* BIG_ENDIAN */
866 ctx->Yi.d[3] = ctr;
867#endif
868} 752}
869LCRYPTO_ALIAS(CRYPTO_gcm128_setiv); 753LCRYPTO_ALIAS(CRYPTO_gcm128_setiv);
870 754
@@ -960,15 +844,7 @@ CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
960 ctx->ares = 0; 844 ctx->ares = 0;
961 } 845 }
962 846
963#if BYTE_ORDER == LITTLE_ENDIAN 847 ctr = be32toh(ctx->Yi.d[3]);
964#ifdef BSWAP4
965 ctr = BSWAP4(ctx->Yi.d[3]);
966#else
967 ctr = GETU32(ctx->Yi.c + 12);
968#endif
969#else /* BIG_ENDIAN */
970 ctr = ctx->Yi.d[3];
971#endif
972 848
973 n = ctx->mres; 849 n = ctx->mres;
974#if !defined(OPENSSL_SMALL_FOOTPRINT) 850#if !defined(OPENSSL_SMALL_FOOTPRINT)
@@ -1002,15 +878,8 @@ CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
1002 878
1003 (*block)(ctx->Yi.c, ctx->EKi.c, key); 879 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1004 ++ctr; 880 ++ctr;
1005#if BYTE_ORDER == LITTLE_ENDIAN 881 ctx->Yi.d[3] = htobe32(ctr);
1006#ifdef BSWAP4 882
1007 ctx->Yi.d[3] = BSWAP4(ctr);
1008#else
1009 PUTU32(ctx->Yi.c + 12, ctr);
1010#endif
1011#else /* BIG_ENDIAN */
1012 ctx->Yi.d[3] = ctr;
1013#endif
1014 for (i = 0; i < 16/sizeof(size_t); ++i) 883 for (i = 0; i < 16/sizeof(size_t); ++i)
1015 out_t[i] = in_t[i] ^ 884 out_t[i] = in_t[i] ^
1016 ctx->EKi.t[i]; 885 ctx->EKi.t[i];
@@ -1030,15 +899,8 @@ CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
1030 899
1031 (*block)(ctx->Yi.c, ctx->EKi.c, key); 900 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1032 ++ctr; 901 ++ctr;
1033#if BYTE_ORDER == LITTLE_ENDIAN 902 ctx->Yi.d[3] = htobe32(ctr);
1034#ifdef BSWAP4 903
1035 ctx->Yi.d[3] = BSWAP4(ctr);
1036#else
1037 PUTU32(ctx->Yi.c + 12, ctr);
1038#endif
1039#else /* BIG_ENDIAN */
1040 ctx->Yi.d[3] = ctr;
1041#endif
1042 for (i = 0; i < 16/sizeof(size_t); ++i) 904 for (i = 0; i < 16/sizeof(size_t); ++i)
1043 out_t[i] = in_t[i] ^ 905 out_t[i] = in_t[i] ^
1044 ctx->EKi.t[i]; 906 ctx->EKi.t[i];
@@ -1055,15 +917,8 @@ CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
1055 917
1056 (*block)(ctx->Yi.c, ctx->EKi.c, key); 918 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1057 ++ctr; 919 ++ctr;
1058#if BYTE_ORDER == LITTLE_ENDIAN 920 ctx->Yi.d[3] = htobe32(ctr);
1059#ifdef BSWAP4 921
1060 ctx->Yi.d[3] = BSWAP4(ctr);
1061#else
1062 PUTU32(ctx->Yi.c + 12, ctr);
1063#endif
1064#else /* BIG_ENDIAN */
1065 ctx->Yi.d[3] = ctr;
1066#endif
1067 for (i = 0; i < 16/sizeof(size_t); ++i) 922 for (i = 0; i < 16/sizeof(size_t); ++i)
1068 ctx->Xi.t[i] ^= 923 ctx->Xi.t[i] ^=
1069 out_t[i] = in_t[i] ^ ctx->EKi.t[i]; 924 out_t[i] = in_t[i] ^ ctx->EKi.t[i];
@@ -1076,15 +931,8 @@ CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
1076 if (len) { 931 if (len) {
1077 (*block)(ctx->Yi.c, ctx->EKi.c, key); 932 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1078 ++ctr; 933 ++ctr;
1079#if BYTE_ORDER == LITTLE_ENDIAN 934 ctx->Yi.d[3] = htobe32(ctr);
1080#ifdef BSWAP4 935
1081 ctx->Yi.d[3] = BSWAP4(ctr);
1082#else
1083 PUTU32(ctx->Yi.c + 12, ctr);
1084#endif
1085#else /* BIG_ENDIAN */
1086 ctx->Yi.d[3] = ctr;
1087#endif
1088 while (len--) { 936 while (len--) {
1089 ctx->Xi.c[n] ^= out[n] = in[n] ^ 937 ctx->Xi.c[n] ^= out[n] = in[n] ^
1090 ctx->EKi.c[n]; 938 ctx->EKi.c[n];
@@ -1100,15 +948,7 @@ CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
1100 if (n == 0) { 948 if (n == 0) {
1101 (*block)(ctx->Yi.c, ctx->EKi.c, key); 949 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1102 ++ctr; 950 ++ctr;
1103#if BYTE_ORDER == LITTLE_ENDIAN 951 ctx->Yi.d[3] = htobe32(ctr);
1104#ifdef BSWAP4
1105 ctx->Yi.d[3] = BSWAP4(ctr);
1106#else
1107 PUTU32(ctx->Yi.c + 12, ctr);
1108#endif
1109#else /* BIG_ENDIAN */
1110 ctx->Yi.d[3] = ctr;
1111#endif
1112 } 952 }
1113 ctx->Xi.c[n] ^= out[i] = in[i] ^ ctx->EKi.c[n]; 953 ctx->Xi.c[n] ^= out[i] = in[i] ^ ctx->EKi.c[n];
1114 n = (n + 1) % 16; 954 n = (n + 1) % 16;
@@ -1150,15 +990,7 @@ CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
1150 ctx->ares = 0; 990 ctx->ares = 0;
1151 } 991 }
1152 992
1153#if BYTE_ORDER == LITTLE_ENDIAN 993 ctr = be32toh(ctx->Yi.d[3]);
1154#ifdef BSWAP4
1155 ctr = BSWAP4(ctx->Yi.d[3]);
1156#else
1157 ctr = GETU32(ctx->Yi.c + 12);
1158#endif
1159#else /* BIG_ENDIAN */
1160 ctr = ctx->Yi.d[3];
1161#endif
1162 994
1163 n = ctx->mres; 995 n = ctx->mres;
1164#if !defined(OPENSSL_SMALL_FOOTPRINT) 996#if !defined(OPENSSL_SMALL_FOOTPRINT)
@@ -1194,15 +1026,8 @@ CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
1194 1026
1195 (*block)(ctx->Yi.c, ctx->EKi.c, key); 1027 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1196 ++ctr; 1028 ++ctr;
1197#if BYTE_ORDER == LITTLE_ENDIAN 1029 ctx->Yi.d[3] = htobe32(ctr);
1198#ifdef BSWAP4 1030
1199 ctx->Yi.d[3] = BSWAP4(ctr);
1200#else
1201 PUTU32(ctx->Yi.c + 12, ctr);
1202#endif
1203#else /* BIG_ENDIAN */
1204 ctx->Yi.d[3] = ctr;
1205#endif
1206 for (i = 0; i < 16/sizeof(size_t); ++i) 1031 for (i = 0; i < 16/sizeof(size_t); ++i)
1207 out_t[i] = in_t[i] ^ 1032 out_t[i] = in_t[i] ^
1208 ctx->EKi.t[i]; 1033 ctx->EKi.t[i];
@@ -1220,15 +1045,8 @@ CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
1220 1045
1221 (*block)(ctx->Yi.c, ctx->EKi.c, key); 1046 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1222 ++ctr; 1047 ++ctr;
1223#if BYTE_ORDER == LITTLE_ENDIAN 1048 ctx->Yi.d[3] = htobe32(ctr);
1224#ifdef BSWAP4 1049
1225 ctx->Yi.d[3] = BSWAP4(ctr);
1226#else
1227 PUTU32(ctx->Yi.c + 12, ctr);
1228#endif
1229#else /* BIG_ENDIAN */
1230 ctx->Yi.d[3] = ctr;
1231#endif
1232 for (i = 0; i < 16/sizeof(size_t); ++i) 1050 for (i = 0; i < 16/sizeof(size_t); ++i)
1233 out_t[i] = in_t[i] ^ 1051 out_t[i] = in_t[i] ^
1234 ctx->EKi.t[i]; 1052 ctx->EKi.t[i];
@@ -1244,15 +1062,8 @@ CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
1244 1062
1245 (*block)(ctx->Yi.c, ctx->EKi.c, key); 1063 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1246 ++ctr; 1064 ++ctr;
1247#if BYTE_ORDER == LITTLE_ENDIAN 1065 ctx->Yi.d[3] = htobe32(ctr);
1248#ifdef BSWAP4 1066
1249 ctx->Yi.d[3] = BSWAP4(ctr);
1250#else
1251 PUTU32(ctx->Yi.c + 12, ctr);
1252#endif
1253#else /* BIG_ENDIAN */
1254 ctx->Yi.d[3] = ctr;
1255#endif
1256 for (i = 0; i < 16/sizeof(size_t); ++i) { 1067 for (i = 0; i < 16/sizeof(size_t); ++i) {
1257 size_t c = in[i]; 1068 size_t c = in[i];
1258 out[i] = c ^ ctx->EKi.t[i]; 1069 out[i] = c ^ ctx->EKi.t[i];
@@ -1267,15 +1078,8 @@ CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
1267 if (len) { 1078 if (len) {
1268 (*block)(ctx->Yi.c, ctx->EKi.c, key); 1079 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1269 ++ctr; 1080 ++ctr;
1270#if BYTE_ORDER == LITTLE_ENDIAN 1081 ctx->Yi.d[3] = htobe32(ctr);
1271#ifdef BSWAP4 1082
1272 ctx->Yi.d[3] = BSWAP4(ctr);
1273#else
1274 PUTU32(ctx->Yi.c + 12, ctr);
1275#endif
1276#else /* BIG_ENDIAN */
1277 ctx->Yi.d[3] = ctr;
1278#endif
1279 while (len--) { 1083 while (len--) {
1280 u8 c = in[n]; 1084 u8 c = in[n];
1281 ctx->Xi.c[n] ^= c; 1085 ctx->Xi.c[n] ^= c;
@@ -1293,15 +1097,7 @@ CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
1293 if (n == 0) { 1097 if (n == 0) {
1294 (*block)(ctx->Yi.c, ctx->EKi.c, key); 1098 (*block)(ctx->Yi.c, ctx->EKi.c, key);
1295 ++ctr; 1099 ++ctr;
1296#if BYTE_ORDER == LITTLE_ENDIAN 1100 ctx->Yi.d[3] = htobe32(ctr);
1297#ifdef BSWAP4
1298 ctx->Yi.d[3] = BSWAP4(ctr);
1299#else
1300 PUTU32(ctx->Yi.c + 12, ctr);
1301#endif
1302#else /* BIG_ENDIAN */
1303 ctx->Yi.d[3] = ctr;
1304#endif
1305 } 1101 }
1306 c = in[i]; 1102 c = in[i];
1307 out[i] = c ^ ctx->EKi.c[n]; 1103 out[i] = c ^ ctx->EKi.c[n];
@@ -1344,15 +1140,7 @@ CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
1344 ctx->ares = 0; 1140 ctx->ares = 0;
1345 } 1141 }
1346 1142
1347#if BYTE_ORDER == LITTLE_ENDIAN 1143 ctr = be32toh(ctx->Yi.d[3]);
1348#ifdef BSWAP4
1349 ctr = BSWAP4(ctx->Yi.d[3]);
1350#else
1351 ctr = GETU32(ctx->Yi.c + 12);
1352#endif
1353#else /* BIG_ENDIAN */
1354 ctr = ctx->Yi.d[3];
1355#endif
1356 1144
1357 n = ctx->mres; 1145 n = ctx->mres;
1358 if (n) { 1146 if (n) {
@@ -1372,15 +1160,7 @@ CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
1372 while (len >= GHASH_CHUNK) { 1160 while (len >= GHASH_CHUNK) {
1373 (*stream)(in, out, GHASH_CHUNK/16, key, ctx->Yi.c); 1161 (*stream)(in, out, GHASH_CHUNK/16, key, ctx->Yi.c);
1374 ctr += GHASH_CHUNK/16; 1162 ctr += GHASH_CHUNK/16;
1375#if BYTE_ORDER == LITTLE_ENDIAN 1163 ctx->Yi.d[3] = htobe32(ctr);
1376#ifdef BSWAP4
1377 ctx->Yi.d[3] = BSWAP4(ctr);
1378#else
1379 PUTU32(ctx->Yi.c + 12, ctr);
1380#endif
1381#else /* BIG_ENDIAN */
1382 ctx->Yi.d[3] = ctr;
1383#endif
1384 GHASH(ctx, out, GHASH_CHUNK); 1164 GHASH(ctx, out, GHASH_CHUNK);
1385 out += GHASH_CHUNK; 1165 out += GHASH_CHUNK;
1386 in += GHASH_CHUNK; 1166 in += GHASH_CHUNK;
@@ -1392,15 +1172,7 @@ CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
1392 1172
1393 (*stream)(in, out, j, key, ctx->Yi.c); 1173 (*stream)(in, out, j, key, ctx->Yi.c);
1394 ctr += (unsigned int)j; 1174 ctr += (unsigned int)j;
1395#if BYTE_ORDER == LITTLE_ENDIAN 1175 ctx->Yi.d[3] = htobe32(ctr);
1396#ifdef BSWAP4
1397 ctx->Yi.d[3] = BSWAP4(ctr);
1398#else
1399 PUTU32(ctx->Yi.c + 12, ctr);
1400#endif
1401#else /* BIG_ENDIAN */
1402 ctx->Yi.d[3] = ctr;
1403#endif
1404 in += i; 1176 in += i;
1405 len -= i; 1177 len -= i;
1406#if defined(GHASH) 1178#if defined(GHASH)
@@ -1418,15 +1190,7 @@ CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
1418 if (len) { 1190 if (len) {
1419 (*ctx->block)(ctx->Yi.c, ctx->EKi.c, key); 1191 (*ctx->block)(ctx->Yi.c, ctx->EKi.c, key);
1420 ++ctr; 1192 ++ctr;
1421#if BYTE_ORDER == LITTLE_ENDIAN 1193 ctx->Yi.d[3] = htobe32(ctr);
1422#ifdef BSWAP4
1423 ctx->Yi.d[3] = BSWAP4(ctr);
1424#else
1425 PUTU32(ctx->Yi.c + 12, ctr);
1426#endif
1427#else /* BIG_ENDIAN */
1428 ctx->Yi.d[3] = ctr;
1429#endif
1430 while (len--) { 1194 while (len--) {
1431 ctx->Xi.c[n] ^= out[n] = in[n] ^ ctx->EKi.c[n]; 1195 ctx->Xi.c[n] ^= out[n] = in[n] ^ ctx->EKi.c[n];
1432 ++n; 1196 ++n;
@@ -1466,15 +1230,7 @@ CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
1466 ctx->ares = 0; 1230 ctx->ares = 0;
1467 } 1231 }
1468 1232
1469#if BYTE_ORDER == LITTLE_ENDIAN 1233 ctr = be32toh(ctx->Yi.d[3]);
1470#ifdef BSWAP4
1471 ctr = BSWAP4(ctx->Yi.d[3]);
1472#else
1473 ctr = GETU32(ctx->Yi.c + 12);
1474#endif
1475#else /* BIG_ENDIAN */
1476 ctr = ctx->Yi.d[3];
1477#endif
1478 1234
1479 n = ctx->mres; 1235 n = ctx->mres;
1480 if (n) { 1236 if (n) {
@@ -1497,15 +1253,7 @@ CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
1497 GHASH(ctx, in, GHASH_CHUNK); 1253 GHASH(ctx, in, GHASH_CHUNK);
1498 (*stream)(in, out, GHASH_CHUNK/16, key, ctx->Yi.c); 1254 (*stream)(in, out, GHASH_CHUNK/16, key, ctx->Yi.c);
1499 ctr += GHASH_CHUNK/16; 1255 ctr += GHASH_CHUNK/16;
1500#if BYTE_ORDER == LITTLE_ENDIAN 1256 ctx->Yi.d[3] = htobe32(ctr);
1501#ifdef BSWAP4
1502 ctx->Yi.d[3] = BSWAP4(ctr);
1503#else
1504 PUTU32(ctx->Yi.c + 12, ctr);
1505#endif
1506#else /* BIG_ENDIAN */
1507 ctx->Yi.d[3] = ctr;
1508#endif
1509 out += GHASH_CHUNK; 1257 out += GHASH_CHUNK;
1510 in += GHASH_CHUNK; 1258 in += GHASH_CHUNK;
1511 len -= GHASH_CHUNK; 1259 len -= GHASH_CHUNK;
@@ -1529,15 +1277,7 @@ CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
1529#endif 1277#endif
1530 (*stream)(in, out, j, key, ctx->Yi.c); 1278 (*stream)(in, out, j, key, ctx->Yi.c);
1531 ctr += (unsigned int)j; 1279 ctr += (unsigned int)j;
1532#if BYTE_ORDER == LITTLE_ENDIAN 1280 ctx->Yi.d[3] = htobe32(ctr);
1533#ifdef BSWAP4
1534 ctx->Yi.d[3] = BSWAP4(ctr);
1535#else
1536 PUTU32(ctx->Yi.c + 12, ctr);
1537#endif
1538#else /* BIG_ENDIAN */
1539 ctx->Yi.d[3] = ctr;
1540#endif
1541 out += i; 1281 out += i;
1542 in += i; 1282 in += i;
1543 len -= i; 1283 len -= i;
@@ -1545,15 +1285,7 @@ CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
1545 if (len) { 1285 if (len) {
1546 (*ctx->block)(ctx->Yi.c, ctx->EKi.c, key); 1286 (*ctx->block)(ctx->Yi.c, ctx->EKi.c, key);
1547 ++ctr; 1287 ++ctr;
1548#if BYTE_ORDER == LITTLE_ENDIAN 1288 ctx->Yi.d[3] = htobe32(ctr);
1549#ifdef BSWAP4
1550 ctx->Yi.d[3] = BSWAP4(ctr);
1551#else
1552 PUTU32(ctx->Yi.c + 12, ctr);
1553#endif
1554#else /* BIG_ENDIAN */
1555 ctx->Yi.d[3] = ctr;
1556#endif
1557 while (len--) { 1289 while (len--) {
1558 u8 c = in[n]; 1290 u8 c = in[n];
1559 ctx->Xi.c[n] ^= c; 1291 ctx->Xi.c[n] ^= c;
@@ -1580,25 +1312,8 @@ CRYPTO_gcm128_finish(GCM128_CONTEXT *ctx, const unsigned char *tag,
1580 if (ctx->mres || ctx->ares) 1312 if (ctx->mres || ctx->ares)
1581 GCM_MUL(ctx, Xi); 1313 GCM_MUL(ctx, Xi);
1582 1314
1583#if BYTE_ORDER == LITTLE_ENDIAN 1315 ctx->Xi.u[0] ^= htobe64(alen);
1584#ifdef BSWAP8 1316 ctx->Xi.u[1] ^= htobe64(clen);
1585 alen = BSWAP8(alen);
1586 clen = BSWAP8(clen);
1587#else
1588 {
1589 u8 *p = ctx->len.c;
1590
1591 ctx->len.u[0] = alen;
1592 ctx->len.u[1] = clen;
1593
1594 alen = (u64)GETU32(p) << 32|GETU32(p + 4);
1595 clen = (u64)GETU32(p + 8) << 32|GETU32(p + 12);
1596 }
1597#endif
1598#endif
1599
1600 ctx->Xi.u[0] ^= alen;
1601 ctx->Xi.u[1] ^= clen;
1602 GCM_MUL(ctx, Xi); 1317 GCM_MUL(ctx, Xi);
1603 1318
1604 ctx->Xi.u[0] ^= ctx->EK0.u[0]; 1319 ctx->Xi.u[0] ^= ctx->EK0.u[0];