From 5dcdc958812ab0f9a065863fd2678d86c850feb9 Mon Sep 17 00:00:00 2001
From: bcook <>
Date: Sun, 13 Aug 2017 17:46:24 +0000
Subject: move endian/word size checks from runtime to compile time

ok guenther@
---
 src/lib/libcrypto/modes/ctr128.c |  19 +-
 src/lib/libcrypto/modes/gcm128.c | 510 ++++++++++++++++++++-------------------
 src/lib/libcrypto/modes/xts128.c |  70 +++---
 src/lib/libcrypto/rc4/rc4_enc.c  |  66 ++---
 4 files changed, 340 insertions(+), 325 deletions(-)
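
The conversion applied throughout is mechanical, and the sketch below
illustrates it with a small, self-contained example placed here in the
patch-note area (ignored by git am). The function names and the use of
__builtin_bswap32 are illustrative assumptions for this note only; they
do not appear in the patch. A run-time byte-order branch, whose dead arm
was previously left for the optimizer to remove, becomes a preprocessor
conditional, and run-time sizeof(size_t)==8 checks become #ifdef _LP64.

#include <endian.h>	/* BYTE_ORDER, LITTLE_ENDIAN (assumed header) */
#include <stdint.h>
#include <string.h>

/* Before: endianness decided at run time; the compiler is trusted to
 * drop the dead branch. */
static uint32_t
load_be32_runtime(const unsigned char *p)
{
	uint32_t v;

	memcpy(&v, p, sizeof(v));
	if (BYTE_ORDER == LITTLE_ENDIAN)
		v = __builtin_bswap32(v);	/* GCC/Clang builtin (assumed) */
	return v;
}

/* After: the same decision made by the preprocessor, so only one arm
 * is compiled at all. */
static uint32_t
load_be32_compiletime(const unsigned char *p)
{
	uint32_t v;

	memcpy(&v, p, sizeof(v));
#if BYTE_ORDER == LITTLE_ENDIAN
	v = __builtin_bswap32(v);
#endif
	return v;
}

/* The word-size checks follow the same pattern: a run-time
 * sizeof(size_t)==8 test becomes #ifdef _LP64 at compile time. */
static uint64_t
fold_rem(uint64_t hi, uint64_t rem)
{
#ifdef _LP64
	return hi ^ rem;		/* 64-bit size_t */
#else
	return hi ^ (rem << 32);	/* 32-bit size_t */
#endif
}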

diff --git a/src/lib/libcrypto/modes/ctr128.c b/src/lib/libcrypto/modes/ctr128.c
index 7fd0223701..23979d64f1 100644
--- a/src/lib/libcrypto/modes/ctr128.c
+++ b/src/lib/libcrypto/modes/ctr128.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: ctr128.c,v 1.6 2015/02/10 09:46:30 miod Exp $ */
+/* $OpenBSD: ctr128.c,v 1.7 2017/08/13 17:46:24 bcook Exp $ */
 /* ====================================================================
  * Copyright (c) 2008 The OpenSSL Project.  All rights reserved.
  *
@@ -81,22 +81,21 @@ static void ctr128_inc(unsigned char *counter) {
 static void
 ctr128_inc_aligned(unsigned char *counter)
 {
-	size_t *data,c,n;
-
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
-		ctr128_inc(counter);
-		return;
-	}
-
+#if BYTE_ORDER == LITTLE_ENDIAN
+	ctr128_inc(counter);
+#else
+	size_t *data, c, n;
 	data = (size_t *)counter;
-	n = 16/sizeof(size_t);
+	n = 16 / sizeof(size_t);
 	do {
 		--n;
 		c = data[n];
 		++c;
 		data[n] = c;
-		if (c) return;
+		if (c)
+			return;
 	} while (n);
+#endif
 }
 #endif
 
diff --git a/src/lib/libcrypto/modes/gcm128.c b/src/lib/libcrypto/modes/gcm128.c
index c080d9c16a..0b29d9850d 100644
--- a/src/lib/libcrypto/modes/gcm128.c
+++ b/src/lib/libcrypto/modes/gcm128.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: gcm128.c,v 1.16 2017/05/02 03:59:44 deraadt Exp $ */
+/* $OpenBSD: gcm128.c,v 1.17 2017/08/13 17:46:24 bcook Exp $ */
 /* ====================================================================
  * Copyright (c) 2010 The OpenSSL Project.  All rights reserved.
  *
@@ -224,29 +224,29 @@ static void gcm_gmult_8bit(u64 Xi[2], const u128 Htable[256])
 		rem  = (size_t)Z.lo&0xff;
 		Z.lo = (Z.hi<<56)|(Z.lo>>8);
 		Z.hi = (Z.hi>>8);
-		if (sizeof(size_t)==8)
-			Z.hi ^= rem_8bit[rem];
-		else
-			Z.hi ^= (u64)rem_8bit[rem]<<32;
+#ifdef _LP64
+		Z.hi ^= rem_8bit[rem];
+#else
+		Z.hi ^= (u64)rem_8bit[rem]<<32;
+#endif
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP8
-		Xi[0] = BSWAP8(Z.hi);
-		Xi[1] = BSWAP8(Z.lo);
+	Xi[0] = BSWAP8(Z.hi);
+	Xi[1] = BSWAP8(Z.lo);
 #else
-		u8 *p = (u8 *)Xi;
-		u32 v;
-		v = (u32)(Z.hi>>32);	PUTU32(p,v);
-		v = (u32)(Z.hi);	PUTU32(p+4,v);
-		v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
-		v = (u32)(Z.lo);	PUTU32(p+12,v);
+	u8 *p = (u8 *)Xi;
+	u32 v;
+	v = (u32)(Z.hi>>32);	PUTU32(p,v);
+	v = (u32)(Z.hi);	PUTU32(p+4,v);
+	v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
+	v = (u32)(Z.lo);	PUTU32(p+12,v);
+#endif
+#else /* BIG_ENDIAN */
+	Xi[0] = Z.hi;
+	Xi[1] = Z.lo;
 #endif
-	}
-	else {
-		Xi[0] = Z.hi;
-		Xi[1] = Z.lo;
-	}
 }
 #define GCM_MUL(ctx,Xi)   gcm_gmult_8bit(ctx->Xi.u,ctx->Htable)
 
@@ -307,19 +307,19 @@ static void gcm_init_4bit(u128 Htable[16], u64 H[2])
 	{
 	int j;
 
-	if (BYTE_ORDER == LITTLE_ENDIAN)
-		for (j=0;j<16;++j) {
-			V = Htable[j];
-			Htable[j].hi = V.lo;
-			Htable[j].lo = V.hi;
-		}
-	else
-		for (j=0;j<16;++j) {
-			V = Htable[j];
-			Htable[j].hi = V.lo<<32|V.lo>>32;
-			Htable[j].lo = V.hi<<32|V.hi>>32;
-		}
+#if BYTE_ORDER == LITTLE_ENDIAN
+	for (j=0;j<16;++j) {
+		V = Htable[j];
+		Htable[j].hi = V.lo;
+		Htable[j].lo = V.hi;
 	}
+#else /* BIG_ENDIAN */
+	for (j=0;j<16;++j) {
+		V = Htable[j];
+		Htable[j].hi = V.lo<<32|V.lo>>32;
+		Htable[j].lo = V.hi<<32|V.hi>>32;
+	}
+#endif
 #endif
 }
 
@@ -347,11 +347,11 @@ static void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16])
 		rem  = (size_t)Z.lo&0xf;
 		Z.lo = (Z.hi<<60)|(Z.lo>>4);
 		Z.hi = (Z.hi>>4);
-		if (sizeof(size_t)==8)
-			Z.hi ^= rem_4bit[rem];
-		else
-			Z.hi ^= (u64)rem_4bit[rem]<<32;
-
+#ifdef _LP64
+		Z.hi ^= rem_4bit[rem];
+#else
+		Z.hi ^= (u64)rem_4bit[rem]<<32;
+#endif
 		Z.hi ^= Htable[nhi].hi;
 		Z.lo ^= Htable[nhi].lo;
 
@@ -364,32 +364,31 @@ static void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16])
 		rem  = (size_t)Z.lo&0xf;
 		Z.lo = (Z.hi<<60)|(Z.lo>>4);
 		Z.hi = (Z.hi>>4);
-		if (sizeof(size_t)==8)
-			Z.hi ^= rem_4bit[rem];
-		else
-			Z.hi ^= (u64)rem_4bit[rem]<<32;
-
+#ifdef _LP64
+		Z.hi ^= rem_4bit[rem];
+#else
+		Z.hi ^= (u64)rem_4bit[rem]<<32;
+#endif
 		Z.hi ^= Htable[nlo].hi;
 		Z.lo ^= Htable[nlo].lo;
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP8
-		Xi[0] = BSWAP8(Z.hi);
-		Xi[1] = BSWAP8(Z.lo);
+	Xi[0] = BSWAP8(Z.hi);
+	Xi[1] = BSWAP8(Z.lo);
 #else
-		u8 *p = (u8 *)Xi;
-		u32 v;
-		v = (u32)(Z.hi>>32);	PUTU32(p,v);
-		v = (u32)(Z.hi);	PUTU32(p+4,v);
-		v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
-		v = (u32)(Z.lo);	PUTU32(p+12,v);
+	u8 *p = (u8 *)Xi;
+	u32 v;
+	v = (u32)(Z.hi>>32);	PUTU32(p,v);
+	v = (u32)(Z.hi);	PUTU32(p+4,v);
+	v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
+	v = (u32)(Z.lo);	PUTU32(p+12,v);
+#endif
+#else /* BIG_ENDIAN */
+	Xi[0] = Z.hi;
+	Xi[1] = Z.lo;
 #endif
-	}
-	else {
-		Xi[0] = Z.hi;
-		Xi[1] = Z.lo;
-	}
 }
 
 #if !defined(OPENSSL_SMALL_FOOTPRINT)
@@ -422,11 +421,11 @@ static void gcm_ghash_4bit(u64 Xi[2],const u128 Htable[16],
 		rem  = (size_t)Z.lo&0xf;
 		Z.lo = (Z.hi<<60)|(Z.lo>>4);
 		Z.hi = (Z.hi>>4);
-		if (sizeof(size_t)==8)
-			Z.hi ^= rem_4bit[rem];
-		else
-			Z.hi ^= (u64)rem_4bit[rem]<<32;
-
+#ifdef _LP64
+		Z.hi ^= rem_4bit[rem];
+#else
+		Z.hi ^= (u64)rem_4bit[rem]<<32;
+#endif
 		Z.hi ^= Htable[nhi].hi;
 		Z.lo ^= Htable[nhi].lo;
 
@@ -440,11 +439,11 @@ static void gcm_ghash_4bit(u64 Xi[2],const u128 Htable[16],
 		rem  = (size_t)Z.lo&0xf;
 		Z.lo = (Z.hi<<60)|(Z.lo>>4);
 		Z.hi = (Z.hi>>4);
-		if (sizeof(size_t)==8)
-			Z.hi ^= rem_4bit[rem];
-		else
-			Z.hi ^= (u64)rem_4bit[rem]<<32;
-
+#ifdef _LP64
+		Z.hi ^= rem_4bit[rem];
+#else
+		Z.hi ^= (u64)rem_4bit[rem]<<32;
+#endif
 		Z.hi ^= Htable[nlo].hi;
 		Z.lo ^= Htable[nlo].lo;
 	}
@@ -542,23 +541,22 @@ static void gcm_ghash_4bit(u64 Xi[2],const u128 Htable[16],
 	Z.hi ^= ((u64)rem_8bit[rem<<4])<<48;
 #endif
 
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP8
-		Xi[0] = BSWAP8(Z.hi);
-		Xi[1] = BSWAP8(Z.lo);
+	Xi[0] = BSWAP8(Z.hi);
+	Xi[1] = BSWAP8(Z.lo);
 #else
-		u8 *p = (u8 *)Xi;
-		u32 v;
-		v = (u32)(Z.hi>>32);	PUTU32(p,v);
-		v = (u32)(Z.hi);	PUTU32(p+4,v);
-		v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
-		v = (u32)(Z.lo);	PUTU32(p+12,v);
+	u8 *p = (u8 *)Xi;
+	u32 v;
+	v = (u32)(Z.hi>>32);	PUTU32(p,v);
+	v = (u32)(Z.hi);	PUTU32(p+4,v);
+	v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
+	v = (u32)(Z.lo);	PUTU32(p+12,v);
+#endif
+#else /* BIG_ENDIAN */
+	Xi[0] = Z.hi;
+	Xi[1] = Z.lo;
 #endif
-	}
-	else {
-		Xi[0] = Z.hi;
-		Xi[1] = Z.lo;
-	}
     } while (inp+=16, len-=16);
 }
 #endif
@@ -589,22 +587,21 @@ static void gcm_gmult_1bit(u64 Xi[2],const u64 H[2])
 	V.lo = H[1];
 
 	for (j=0; j<16/sizeof(long); ++j) {
-		if (BYTE_ORDER == LITTLE_ENDIAN) {
-			if (sizeof(long)==8) {
+#if BYTE_ORDER == LITTLE_ENDIAN
+#ifdef _LP64
 #ifdef BSWAP8
-				X = (long)(BSWAP8(xi[j]));
+			X = (long)(BSWAP8(xi[j]));
 #else
-				const u8 *p = (const u8 *)(xi+j);
-				X = (long)((u64)GETU32(p)<<32|GETU32(p+4));
+			const u8 *p = (const u8 *)(xi+j);
+			X = (long)((u64)GETU32(p)<<32|GETU32(p+4));
+#endif
+#else
+			const u8 *p = (const u8 *)(xi+j);
+			X = (long)GETU32(p);
+#endif
+#else /* BIG_ENDIAN */
+		X = xi[j];
 #endif
-			}
-			else {
-				const u8 *p = (const u8 *)(xi+j);
-				X = (long)GETU32(p);
-			}
-		}
-		else
-			X = xi[j];
 
 		for (i=0; i<8*sizeof(long); ++i, X<<=1) {
 			u64 M = (u64)(X>>(8*sizeof(long)-1));
@@ -615,23 +612,22 @@ static void gcm_gmult_1bit(u64 Xi[2],const u64 H[2])
 		}
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP8
-		Xi[0] = BSWAP8(Z.hi);
-		Xi[1] = BSWAP8(Z.lo);
+	Xi[0] = BSWAP8(Z.hi);
+	Xi[1] = BSWAP8(Z.lo);
 #else
-		u8 *p = (u8 *)Xi;
-		u32 v;
-		v = (u32)(Z.hi>>32);	PUTU32(p,v);
-		v = (u32)(Z.hi);	PUTU32(p+4,v);
-		v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
-		v = (u32)(Z.lo);	PUTU32(p+12,v);
+	u8 *p = (u8 *)Xi;
+	u32 v;
+	v = (u32)(Z.hi>>32);	PUTU32(p,v);
+	v = (u32)(Z.hi);	PUTU32(p+4,v);
+	v = (u32)(Z.lo>>32);	PUTU32(p+8,v);
+	v = (u32)(Z.lo);	PUTU32(p+12,v);
+#endif
+#else /* BIG_ENDIAN */
+	Xi[0] = Z.hi;
+	Xi[1] = Z.lo;
 #endif
-	}
-	else {
-		Xi[0] = Z.hi;
-		Xi[1] = Z.lo;
-	}
 }
 #define GCM_MUL(ctx,Xi)	  gcm_gmult_1bit(ctx->Xi.u,ctx->H.u)
 
@@ -691,20 +687,20 @@ void CRYPTO_gcm128_init(GCM128_CONTEXT *ctx,void *key,block128_f block)
 
 	(*block)(ctx->H.c,ctx->H.c,key);
 
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
-		/* H is stored in host byte order */
+#if BYTE_ORDER == LITTLE_ENDIAN
+	/* H is stored in host byte order */
 #ifdef BSWAP8
-		ctx->H.u[0] = BSWAP8(ctx->H.u[0]);
-		ctx->H.u[1] = BSWAP8(ctx->H.u[1]);
+	ctx->H.u[0] = BSWAP8(ctx->H.u[0]);
+	ctx->H.u[1] = BSWAP8(ctx->H.u[1]);
 #else
-		u8 *p = ctx->H.c;
-		u64 hi,lo;
-		hi = (u64)GETU32(p)  <<32|GETU32(p+4);
-		lo = (u64)GETU32(p+8)<<32|GETU32(p+12);
-		ctx->H.u[0] = hi;
-		ctx->H.u[1] = lo;
+	u8 *p = ctx->H.c;
+	u64 hi,lo;
+	hi = (u64)GETU32(p)  <<32|GETU32(p+4);
+	lo = (u64)GETU32(p+8)<<32|GETU32(p+12);
+	ctx->H.u[0] = hi;
+	ctx->H.u[1] = lo;
+#endif
 #endif
-	}
 
 #if	TABLE_BITS==8
 	gcm_init_8bit(ctx->Htable,ctx->H.u);
@@ -788,45 +784,47 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx,const unsigned char *iv,size_t len)
 			GCM_MUL(ctx,Yi);
 		}
 		len0 <<= 3;
-		if (BYTE_ORDER == LITTLE_ENDIAN) {
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP8
-			ctx->Yi.u[1]  ^= BSWAP8(len0);
+		ctx->Yi.u[1]  ^= BSWAP8(len0);
 #else
-			ctx->Yi.c[8]  ^= (u8)(len0>>56);
-			ctx->Yi.c[9]  ^= (u8)(len0>>48);
-			ctx->Yi.c[10] ^= (u8)(len0>>40);
-			ctx->Yi.c[11] ^= (u8)(len0>>32);
-			ctx->Yi.c[12] ^= (u8)(len0>>24);
-			ctx->Yi.c[13] ^= (u8)(len0>>16);
-			ctx->Yi.c[14] ^= (u8)(len0>>8);
-			ctx->Yi.c[15] ^= (u8)(len0);
+		ctx->Yi.c[8]  ^= (u8)(len0>>56);
+		ctx->Yi.c[9]  ^= (u8)(len0>>48);
+		ctx->Yi.c[10] ^= (u8)(len0>>40);
+		ctx->Yi.c[11] ^= (u8)(len0>>32);
+		ctx->Yi.c[12] ^= (u8)(len0>>24);
+		ctx->Yi.c[13] ^= (u8)(len0>>16);
+		ctx->Yi.c[14] ^= (u8)(len0>>8);
+		ctx->Yi.c[15] ^= (u8)(len0);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.u[1]  ^= len0;
 #endif
-		}
-		else
-			ctx->Yi.u[1]  ^= len0;
 
 		GCM_MUL(ctx,Yi);
 
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctr = BSWAP4(ctx->Yi.d[3]);
+		ctr = BSWAP4(ctx->Yi.d[3]);
 #else
-			ctr = GETU32(ctx->Yi.c+12);
+		ctr = GETU32(ctx->Yi.c+12);
+#endif
+#else /* BIG_ENDIAN */
+		ctr = ctx->Yi.d[3];
 #endif
-		else
-			ctr = ctx->Yi.d[3];
 	}
 
 	(*ctx->block)(ctx->Yi.c,ctx->EK0.c,ctx->key);
 	++ctr;
-	if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-		ctx->Yi.d[3] = BSWAP4(ctr);
+	ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-		PUTU32(ctx->Yi.c+12,ctr);
+	PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+	ctx->Yi.d[3] = ctr;
 #endif
-	else
-		ctx->Yi.d[3] = ctr;
 }
 
 int CRYPTO_gcm128_aad(GCM128_CONTEXT *ctx,const unsigned char *aad,size_t len)
@@ -914,14 +912,15 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
 		ctx->ares = 0;
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-		ctr = BSWAP4(ctx->Yi.d[3]);
+	ctr = BSWAP4(ctx->Yi.d[3]);
 #else
-		ctr = GETU32(ctx->Yi.c+12);
+	ctr = GETU32(ctx->Yi.c+12);
+#endif
+#else /* BIG_ENDIAN */
+	ctr = ctx->Yi.d[3];
 #endif
-	else
-		ctr = ctx->Yi.d[3];
 
 	n = ctx->mres;
 #if !defined(OPENSSL_SMALL_FOOTPRINT)
@@ -952,14 +951,15 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
 
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			for (i=0; i<16/sizeof(size_t); ++i)
 				out_t[i] = in_t[i] ^ ctx->EKi.t[i];
 			out += 16;
@@ -978,14 +978,15 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
 
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			for (i=0; i<16/sizeof(size_t); ++i)
 				out_t[i] = in_t[i] ^ ctx->EKi.t[i];
 			out += 16;
@@ -1001,14 +1002,15 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
 
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			for (i=0; i<16/sizeof(size_t); ++i)
 				ctx->Xi.t[i] ^=
 				out_t[i] = in_t[i]^ctx->EKi.t[i];
@@ -1021,14 +1023,15 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
 		if (len) {
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			while (len--) {
 				ctx->Xi.c[n] ^= out[n] = in[n]^ctx->EKi.c[n];
 				++n;
@@ -1043,14 +1046,15 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
 		if (n==0) {
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 		}
 		ctx->Xi.c[n] ^= out[i] = in[i]^ctx->EKi.c[n];
 		n = (n+1)%16;
@@ -1090,14 +1094,15 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 		ctx->ares = 0;
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-		ctr = BSWAP4(ctx->Yi.d[3]);
+	ctr = BSWAP4(ctx->Yi.d[3]);
 #else
-		ctr = GETU32(ctx->Yi.c+12);
+	ctr = GETU32(ctx->Yi.c+12);
+#endif
+#else /* BIG_ENDIAN */
+	ctr = ctx->Yi.d[3];
 #endif
-	else
-		ctr = ctx->Yi.d[3];
 
 	n = ctx->mres;
 #if !defined(OPENSSL_SMALL_FOOTPRINT)
@@ -1131,14 +1136,15 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
 				ctx->Yi.d[3] = BSWAP4(ctr);
 #else
 				PUTU32(ctx->Yi.c+12,ctr);
 #endif
-			else
+#else /* BIG_ENDIAN */
 				ctx->Yi.d[3] = ctr;
+#endif
 			for (i=0; i<16/sizeof(size_t); ++i)
 				out_t[i] = in_t[i]^ctx->EKi.t[i];
 			out += 16;
@@ -1155,14 +1161,15 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			for (i=0; i<16/sizeof(size_t); ++i)
 				out_t[i] = in_t[i]^ctx->EKi.t[i];
 			out += 16;
@@ -1177,14 +1184,15 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			for (i=0; i<16/sizeof(size_t); ++i) {
 				size_t c = in[i];
 				out[i] = c^ctx->EKi.t[i];
@@ -1199,14 +1207,15 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 		if (len) {
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 			while (len--) {
 				u8 c = in[n];
 				ctx->Xi.c[n] ^= c;
@@ -1224,14 +1233,15 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 		if (n==0) {
 			(*block)(ctx->Yi.c,ctx->EKi.c,key);
 			++ctr;
-			if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-				ctx->Yi.d[3] = BSWAP4(ctr);
+			ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-				PUTU32(ctx->Yi.c+12,ctr);
+			PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+			ctx->Yi.d[3] = ctr;
 #endif
-			else
-				ctx->Yi.d[3] = ctr;
 		}
 		c = in[i];
 		out[i] = c^ctx->EKi.c[n];
@@ -1272,14 +1282,15 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
 		ctx->ares = 0;
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-		ctr = BSWAP4(ctx->Yi.d[3]);
+	ctr = BSWAP4(ctx->Yi.d[3]);
 #else
-		ctr = GETU32(ctx->Yi.c+12);
+	ctr = GETU32(ctx->Yi.c+12);
+#endif
+#else /* BIG_ENDIAN */
+	ctr = ctx->Yi.d[3];
 #endif
-	else
-		ctr = ctx->Yi.d[3];
 
 	n = ctx->mres;
 	if (n) {
@@ -1298,14 +1309,15 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
 	while (len>=GHASH_CHUNK) {
 		(*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c);
 		ctr += GHASH_CHUNK/16;
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctx->Yi.d[3] = BSWAP4(ctr);
+		ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-			PUTU32(ctx->Yi.c+12,ctr);
+		PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.d[3] = ctr;
 #endif
-		else
-			ctx->Yi.d[3] = ctr;
 		GHASH(ctx,out,GHASH_CHUNK);
 		out += GHASH_CHUNK;
 		in  += GHASH_CHUNK;
@@ -1317,14 +1329,15 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
 
 		(*stream)(in,out,j,key,ctx->Yi.c);
 		ctr += (unsigned int)j;
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctx->Yi.d[3] = BSWAP4(ctr);
+		ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-			PUTU32(ctx->Yi.c+12,ctr);
+		PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.d[3] = ctr;
 #endif
-		else
-			ctx->Yi.d[3] = ctr;
 		in  += i;
 		len -= i;
 #if defined(GHASH)
@@ -1341,14 +1354,15 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx,
 	if (len) {
 		(*ctx->block)(ctx->Yi.c,ctx->EKi.c,key);
 		++ctr;
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctx->Yi.d[3] = BSWAP4(ctr);
+		ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-			PUTU32(ctx->Yi.c+12,ctr);
+		PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.d[3] = ctr;
 #endif
-		else
-			ctx->Yi.d[3] = ctr;
 		while (len--) {
 			ctx->Xi.c[n] ^= out[n] = in[n]^ctx->EKi.c[n];
 			++n;
@@ -1386,14 +1400,15 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
 		ctx->ares = 0;
 	}
 
-	if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-		ctr = BSWAP4(ctx->Yi.d[3]);
+	ctr = BSWAP4(ctx->Yi.d[3]);
 #else
-		ctr = GETU32(ctx->Yi.c+12);
+	ctr = GETU32(ctx->Yi.c+12);
+#endif
+#else /* BIG_ENDIAN */
+	ctr = ctx->Yi.d[3];
 #endif
-	else
-		ctr = ctx->Yi.d[3];
 
 	n = ctx->mres;
 	if (n) {
@@ -1415,14 +1430,15 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
 		GHASH(ctx,in,GHASH_CHUNK);
 		(*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c);
 		ctr += GHASH_CHUNK/16;
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctx->Yi.d[3] = BSWAP4(ctr);
+		ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-			PUTU32(ctx->Yi.c+12,ctr);
+		PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.d[3] = ctr;
 #endif
-		else
-			ctx->Yi.d[3] = ctr;
 		out += GHASH_CHUNK;
 		in  += GHASH_CHUNK;
 		len -= GHASH_CHUNK;
@@ -1445,14 +1461,15 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
 #endif
 		(*stream)(in,out,j,key,ctx->Yi.c);
 		ctr += (unsigned int)j;
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctx->Yi.d[3] = BSWAP4(ctr);
+		ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-			PUTU32(ctx->Yi.c+12,ctr);
+		PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.d[3] = ctr;
 #endif
-		else
-			ctx->Yi.d[3] = ctr;
 		out += i;
 		in  += i;
 		len -= i;
@@ -1460,14 +1477,15 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx,
 	if (len) {
 		(*ctx->block)(ctx->Yi.c,ctx->EKi.c,key);
 		++ctr;
-		if (BYTE_ORDER == LITTLE_ENDIAN)
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP4
-			ctx->Yi.d[3] = BSWAP4(ctr);
+		ctx->Yi.d[3] = BSWAP4(ctr);
 #else
-			PUTU32(ctx->Yi.c+12,ctr);
+		PUTU32(ctx->Yi.c+12,ctr);
+#endif
+#else /* BIG_ENDIAN */
+		ctx->Yi.d[3] = ctr;
 #endif
-		else
-			ctx->Yi.d[3] = ctr;
 		while (len--) {
 			u8 c = in[n];
 			ctx->Xi.c[n] ^= c;
@@ -1492,20 +1510,20 @@ int CRYPTO_gcm128_finish(GCM128_CONTEXT *ctx,const unsigned char *tag,
 	if (ctx->mres || ctx->ares)
 		GCM_MUL(ctx,Xi);
 
-	if (BYTE_ORDER == LITTLE_ENDIAN) {
+#if BYTE_ORDER == LITTLE_ENDIAN
 #ifdef BSWAP8
-		alen = BSWAP8(alen);
-		clen = BSWAP8(clen);
+	alen = BSWAP8(alen);
+	clen = BSWAP8(clen);
 #else
-		u8 *p = ctx->len.c;
+	u8 *p = ctx->len.c;
 
-		ctx->len.u[0] = alen;
-		ctx->len.u[1] = clen;
+	ctx->len.u[0] = alen;
+	ctx->len.u[1] = clen;
 
-		alen = (u64)GETU32(p)  <<32|GETU32(p+4);
-		clen = (u64)GETU32(p+8)<<32|GETU32(p+12);
+	alen = (u64)GETU32(p)  <<32|GETU32(p+4);
+	clen = (u64)GETU32(p+8)<<32|GETU32(p+12);
+#endif
 #endif
-	}
 
 	ctx->Xi.u[0] ^= alen;
 	ctx->Xi.u[1] ^= clen;
diff --git a/src/lib/libcrypto/modes/xts128.c b/src/lib/libcrypto/modes/xts128.c
index 3e2378379e..0be23d4ea9 100644
--- a/src/lib/libcrypto/modes/xts128.c
+++ b/src/lib/libcrypto/modes/xts128.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: xts128.c,v 1.6 2015/02/10 09:46:30 miod Exp $ */
+/* $OpenBSD: xts128.c,v 1.7 2017/08/13 17:46:24 bcook Exp $ */
 /* ====================================================================
  * Copyright (c) 2011 The OpenSSL Project.  All rights reserved.
  *
@@ -98,25 +98,24 @@ int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx, const unsigned char iv[16],
 
 		if (len==0)	return 0;
 
-		if (BYTE_ORDER == LITTLE_ENDIAN) {
-			unsigned int carry,res;
-			
-			res = 0x87&(((int)tweak.d[3])>>31);
-			carry = (unsigned int)(tweak.u[0]>>63);
-			tweak.u[0] = (tweak.u[0]<<1)^res;
-			tweak.u[1] = (tweak.u[1]<<1)|carry;
-		}
-		else {
-			size_t c;
-
-			for (c=0,i=0;i<16;++i) {
-				/*+ substitutes for |, because c is 1 bit */ 
-				c += ((size_t)tweak.c[i])<<1;
-				tweak.c[i] = (u8)c;
-				c = c>>8;
-			}
-			tweak.c[0] ^= (u8)(0x87&(0-c));
+#if BYTE_ORDER == LITTLE_ENDIAN
+		unsigned int carry,res;
+
+		res = 0x87&(((int)tweak.d[3])>>31);
+		carry = (unsigned int)(tweak.u[0]>>63);
+		tweak.u[0] = (tweak.u[0]<<1)^res;
+		tweak.u[1] = (tweak.u[1]<<1)|carry;
+#else /* BIG_ENDIAN */
+		size_t c;
+
+		for (c=0,i=0;i<16;++i) {
+			/*+ substitutes for |, because c is 1 bit */
+			c += ((size_t)tweak.c[i])<<1;
+			tweak.c[i] = (u8)c;
+			c = c>>8;
 		}
+		tweak.c[0] ^= (u8)(0x87&(0-c));
+#endif
 	}
 	if (enc) {
 		for (i=0;i<len;++i) {
@@ -134,25 +133,24 @@ int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx, const unsigned char iv[16],
 	else {
 		union { u64 u[2]; u8 c[16]; } tweak1;
 
-		if (BYTE_ORDER == LITTLE_ENDIAN) {
-			unsigned int carry,res;
+#if BYTE_ORDER == LITTLE_ENDIAN
+		unsigned int carry,res;
 
-			res = 0x87&(((int)tweak.d[3])>>31);
-			carry = (unsigned int)(tweak.u[0]>>63);
-			tweak1.u[0] = (tweak.u[0]<<1)^res;
-			tweak1.u[1] = (tweak.u[1]<<1)|carry;
-		}
-		else {
-			size_t c;
-
-			for (c=0,i=0;i<16;++i) {
-				/*+ substitutes for |, because c is 1 bit */ 
-				c += ((size_t)tweak.c[i])<<1;
-				tweak1.c[i] = (u8)c;
-				c = c>>8;
-			}
-			tweak1.c[0] ^= (u8)(0x87&(0-c));
+		res = 0x87&(((int)tweak.d[3])>>31);
+		carry = (unsigned int)(tweak.u[0]>>63);
+		tweak1.u[0] = (tweak.u[0]<<1)^res;
+		tweak1.u[1] = (tweak.u[1]<<1)|carry;
+#else /* BIG_ENDIAN */
+		size_t c;
+
+		for (c=0,i=0;i<16;++i) {
+			/*+ substitutes for |, because c is 1 bit */
+			c += ((size_t)tweak.c[i])<<1;
+			tweak1.c[i] = (u8)c;
+			c = c>>8;
 		}
+		tweak1.c[0] ^= (u8)(0x87&(0-c));
+#endif
 #ifdef __STRICT_ALIGNMENT
 		memcpy(scratch.c,inp,16);
 		scratch.u[0] ^= tweak1.u[0];
diff --git a/src/lib/libcrypto/rc4/rc4_enc.c b/src/lib/libcrypto/rc4/rc4_enc.c
index 4dacf3f708..bd928b58c9 100644
--- a/src/lib/libcrypto/rc4/rc4_enc.c
+++ b/src/lib/libcrypto/rc4/rc4_enc.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: rc4_enc.c,v 1.15 2015/10/21 16:36:50 jsing Exp $ */
+/* $OpenBSD: rc4_enc.c,v 1.16 2017/08/13 17:46:24 bcook Exp $ */
 /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
  * All rights reserved.
  *
@@ -150,43 +150,43 @@ RC4(RC4_KEY *key, size_t len, const unsigned char *indata,
 		 *
 		 *			<appro@fy.chalmers.se>
 		 */
-		if (BYTE_ORDER != LITTLE_ENDIAN) {	/* BIG-ENDIAN CASE */
+#if BYTE_ORDER == BIG_ENDIAN
 # define BESHFT(c)	(((sizeof(RC4_CHUNK)-(c)-1)*8)&(sizeof(RC4_CHUNK)*8-1))
-			for (; len & (0 - sizeof(RC4_CHUNK)); len -= sizeof(RC4_CHUNK)) {
-				ichunk  = *(RC4_CHUNK *)indata;
-				otp = RC4_STEP << BESHFT(0);
-				otp |= RC4_STEP << BESHFT(1);
-				otp |= RC4_STEP << BESHFT(2);
-				otp |= RC4_STEP << BESHFT(3);
-				if (sizeof(RC4_CHUNK) == 8) {
-					otp |= RC4_STEP << BESHFT(4);
-					otp |= RC4_STEP << BESHFT(5);
-					otp |= RC4_STEP << BESHFT(6);
-					otp |= RC4_STEP << BESHFT(7);
-				}
-				*(RC4_CHUNK *)outdata = otp^ichunk;
-				indata += sizeof(RC4_CHUNK);
-				outdata += sizeof(RC4_CHUNK);
+		for (; len & (0 - sizeof(RC4_CHUNK)); len -= sizeof(RC4_CHUNK)) {
+			ichunk  = *(RC4_CHUNK *)indata;
+			otp = RC4_STEP << BESHFT(0);
+			otp |= RC4_STEP << BESHFT(1);
+			otp |= RC4_STEP << BESHFT(2);
+			otp |= RC4_STEP << BESHFT(3);
+			if (sizeof(RC4_CHUNK) == 8) {
+				otp |= RC4_STEP << BESHFT(4);
+				otp |= RC4_STEP << BESHFT(5);
+				otp |= RC4_STEP << BESHFT(6);
+				otp |= RC4_STEP << BESHFT(7);
 			}
-		} else {	/* LITTLE-ENDIAN CASE */
+			*(RC4_CHUNK *)outdata = otp^ichunk;
+			indata += sizeof(RC4_CHUNK);
+			outdata += sizeof(RC4_CHUNK);
+		}
+#else
 # define LESHFT(c)	(((c)*8)&(sizeof(RC4_CHUNK)*8-1))
-			for (; len & (0 - sizeof(RC4_CHUNK)); len -= sizeof(RC4_CHUNK)) {
-				ichunk = *(RC4_CHUNK *)indata;
-				otp = RC4_STEP;
-				otp |= RC4_STEP << 8;
-				otp |= RC4_STEP << 16;
-				otp |= RC4_STEP << 24;
-				if (sizeof(RC4_CHUNK) == 8) {
-					otp |= RC4_STEP << LESHFT(4);
-					otp |= RC4_STEP << LESHFT(5);
-					otp |= RC4_STEP << LESHFT(6);
-					otp |= RC4_STEP << LESHFT(7);
-				}
-				*(RC4_CHUNK *)outdata = otp ^ ichunk;
-				indata += sizeof(RC4_CHUNK);
-				outdata += sizeof(RC4_CHUNK);
+		for (; len & (0 - sizeof(RC4_CHUNK)); len -= sizeof(RC4_CHUNK)) {
+			ichunk = *(RC4_CHUNK *)indata;
+			otp = RC4_STEP;
+			otp |= RC4_STEP << 8;
+			otp |= RC4_STEP << 16;
+			otp |= RC4_STEP << 24;
+			if (sizeof(RC4_CHUNK) == 8) {
+				otp |= RC4_STEP << LESHFT(4);
+				otp |= RC4_STEP << LESHFT(5);
+				otp |= RC4_STEP << LESHFT(6);
+				otp |= RC4_STEP << LESHFT(7);
 			}
+			*(RC4_CHUNK *)outdata = otp ^ ichunk;
+			indata += sizeof(RC4_CHUNK);
+			outdata += sizeof(RC4_CHUNK);
 		}
+#endif
 	}
 #endif
 #define LOOP(in,out) \
-- 
cgit v1.2.3-55-g6feb