diff options
| author | jsing <> | 2024-03-28 07:13:02 +0000 |
|---|---|---|
| committer | jsing <> | 2024-03-28 07:13:02 +0000 |
| commit | 565c772bb303254bfd01ec460d5958bd608b540f (patch) | |
| tree | de69746d50609cbd03f9f42bddc5fd927e43ffed /src | |
| parent | 40ab5b46122eb72915f2ceb3598d2af6b9accaa9 (diff) | |
| download | openbsd-565c772bb303254bfd01ec460d5958bd608b540f.tar.gz openbsd-565c772bb303254bfd01ec460d5958bd608b540f.tar.bz2 openbsd-565c772bb303254bfd01ec460d5958bd608b540f.zip | |
Expand ROTATE macro to crypto_rol_u32().
Diffstat (limited to 'src')
| -rw-r--r-- | src/lib/libcrypto/ripemd/ripemd.c | 27 |
1 file changed, 16 insertions, 11 deletions
diff --git a/src/lib/libcrypto/ripemd/ripemd.c b/src/lib/libcrypto/ripemd/ripemd.c index 60ef403ded..cc526ae256 100644 --- a/src/lib/libcrypto/ripemd/ripemd.c +++ b/src/lib/libcrypto/ripemd/ripemd.c | |||
| @@ -1,4 +1,4 @@ | |||
| 1 | /* $OpenBSD: ripemd.c,v 1.13 2024/03/28 07:04:21 jsing Exp $ */ | 1 | /* $OpenBSD: ripemd.c,v 1.14 2024/03/28 07:13:02 jsing Exp $ */ |
| 2 | /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) | 2 | /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) |
| 3 | * All rights reserved. | 3 | * All rights reserved. |
| 4 | * | 4 | * |
| @@ -65,6 +65,11 @@ | |||
| 65 | #include <openssl/crypto.h> | 65 | #include <openssl/crypto.h> |
| 66 | #include <openssl/ripemd.h> | 66 | #include <openssl/ripemd.h> |
| 67 | 67 | ||
| 68 | #include "crypto_internal.h" | ||
| 69 | |||
| 70 | /* Ensure that RIPEMD160_LONG and uint32_t are equivalent sizes. */ | ||
| 71 | CTASSERT(sizeof(RIPEMD160_LONG) == sizeof(uint32_t)); | ||
| 72 | |||
| 68 | #define DATA_ORDER_IS_LITTLE_ENDIAN | 73 | #define DATA_ORDER_IS_LITTLE_ENDIAN |
| 69 | 74 | ||
| 70 | #define HASH_LONG RIPEMD160_LONG | 75 | #define HASH_LONG RIPEMD160_LONG |
| @@ -120,28 +125,28 @@ | |||
| 120 | 125 | ||
| 121 | #define RIP1(a,b,c,d,e,w,s) { \ | 126 | #define RIP1(a,b,c,d,e,w,s) { \ |
| 122 | a+=F1(b,c,d)+w; \ | 127 | a+=F1(b,c,d)+w; \ |
| 123 | a=ROTATE(a,s)+e; \ | 128 | a=crypto_rol_u32(a,s)+e; \ |
| 124 | c=ROTATE(c,10); } | 129 | c=crypto_rol_u32(c,10); } |
| 125 | 130 | ||
| 126 | #define RIP2(a,b,c,d,e,w,s,K) { \ | 131 | #define RIP2(a,b,c,d,e,w,s,K) { \ |
| 127 | a+=F2(b,c,d)+w+K; \ | 132 | a+=F2(b,c,d)+w+K; \ |
| 128 | a=ROTATE(a,s)+e; \ | 133 | a=crypto_rol_u32(a,s)+e; \ |
| 129 | c=ROTATE(c,10); } | 134 | c=crypto_rol_u32(c,10); } |
| 130 | 135 | ||
| 131 | #define RIP3(a,b,c,d,e,w,s,K) { \ | 136 | #define RIP3(a,b,c,d,e,w,s,K) { \ |
| 132 | a+=F3(b,c,d)+w+K; \ | 137 | a+=F3(b,c,d)+w+K; \ |
| 133 | a=ROTATE(a,s)+e; \ | 138 | a=crypto_rol_u32(a,s)+e; \ |
| 134 | c=ROTATE(c,10); } | 139 | c=crypto_rol_u32(c,10); } |
| 135 | 140 | ||
| 136 | #define RIP4(a,b,c,d,e,w,s,K) { \ | 141 | #define RIP4(a,b,c,d,e,w,s,K) { \ |
| 137 | a+=F4(b,c,d)+w+K; \ | 142 | a+=F4(b,c,d)+w+K; \ |
| 138 | a=ROTATE(a,s)+e; \ | 143 | a=crypto_rol_u32(a,s)+e; \ |
| 139 | c=ROTATE(c,10); } | 144 | c=crypto_rol_u32(c,10); } |
| 140 | 145 | ||
| 141 | #define RIP5(a,b,c,d,e,w,s,K) { \ | 146 | #define RIP5(a,b,c,d,e,w,s,K) { \ |
| 142 | a+=F5(b,c,d)+w+K; \ | 147 | a+=F5(b,c,d)+w+K; \ |
| 143 | a=ROTATE(a,s)+e; \ | 148 | a=crypto_rol_u32(a,s)+e; \ |
| 144 | c=ROTATE(c,10); } | 149 | c=crypto_rol_u32(c,10); } |
| 145 | 150 | ||
| 146 | static void | 151 | static void |
| 147 | ripemd160_block_data_order(RIPEMD160_CTX *ctx, const void *p, size_t num) | 152 | ripemd160_block_data_order(RIPEMD160_CTX *ctx, const void *p, size_t num) |
