Diffstat (limited to 'src/lib/libcrypto/md32_common.h')
-rw-r--r--	src/lib/libcrypto/md32_common.h	| 41
1 files changed, 27 insertions, 14 deletions
diff --git a/src/lib/libcrypto/md32_common.h b/src/lib/libcrypto/md32_common.h
index 2b91f9eef2..1a404a458d 100644
--- a/src/lib/libcrypto/md32_common.h
+++ b/src/lib/libcrypto/md32_common.h
@@ -94,6 +94,8 @@
  * in original (data) byte order, implemented externally (it
  * actually is optional if data and host are of the same
  * "endianess").
+ * HASH_MAKE_STRING
+ *	macro convering context variables to an ASCII hash string.
  *
  * Optional macros:
  *
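The newly documented HASH_MAKE_STRING hook is how each digest serializes its context words into the output buffer. A minimal sketch of such a definition, assuming an MD5-style context with four 32-bit members A..D and the HOST_l2c store macro this header already provides:

#define HASH_MAKE_STRING(c,s)	do {	\
	unsigned long ll;		\
	ll=(c)->A; HOST_l2c(ll,(s));	\
	ll=(c)->B; HOST_l2c(ll,(s));	\
	ll=(c)->C; HOST_l2c(ll,(s));	\
	ll=(c)->D; HOST_l2c(ll,(s));	\
	} while (0)

This mirrors the four explicit stores that the final hunk below removes from HASH_FINAL.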
@@ -178,8 +180,17 @@
 #undef ROTATE
 #ifndef PEDANTIC
 # if defined(_MSC_VER)
 #  define ROTATE(a,n)	_lrotl(a,n)
-# elif defined(__GNUC__) && __GNUC__>=2 && !defined(NO_ASM)
+# elif defined(__MWERKS__)
+#  if defined(__POWERPC__)
+#   define ROTATE(a,n)	__rlwinm(a,n,0,31)
+#  elif defined(__MC68K__)
+    /* Motorola specific tweak. <appro@fy.chalmers.se> */
+#   define ROTATE(a,n)	( n<24 ? __rol(a,n) : __ror(a,32-n) )
+#  else
+#   define ROTATE(a,n)	__rol(a,n)
+#  endif
+# elif defined(__GNUC__) && __GNUC__>=2 && !defined(NO_ASM) && !defined(NO_INLINE_ASM)
 /*
  * Some GNU C inline assembler templates. Note that these are
  * rotates by *constant* number of bits! But that's exactly
@@ -189,16 +200,16 @@
  */
 # if defined(__i386)
 #  define ROTATE(a,n)	({ register unsigned int ret;	\
-				asm volatile (		\
+				asm (			\
 				"roll %1,%0"		\
 				: "=r"(ret)		\
 				: "I"(n), "0"(a)	\
 				: "cc");		\
 			   ret;				\
 			})
-# elif defined(__powerpc)
+# elif defined(__powerpc) || defined(__ppc)
 #  define ROTATE(a,n)	({ register unsigned int ret;	\
-				asm volatile (		\
+				asm (			\
 				"rlwinm %0,%1,%2,0,31"	\
 				: "=r"(ret)		\
 				: "r"(a), "I"(n));	\
@@ -211,18 +222,18 @@
  * Engage compiler specific "fetch in reverse byte order"
  * intrinsic function if available.
  */
-# if defined(__GNUC__) && __GNUC__>=2 && !defined(NO_ASM)
+# if defined(__GNUC__) && __GNUC__>=2 && !defined(NO_ASM) && !defined(NO_INLINE_ASM)
 /* some GNU C inline assembler templates by <appro@fy.chalmers.se> */
 # if defined(__i386) && !defined(I386_ONLY)
 #  define BE_FETCH32(a)	({ register unsigned int l=(a);\
-				asm volatile (		\
+				asm (			\
 				"bswapl %0"		\
 				: "=r"(l) : "0"(l));	\
 			  l;				\
 			})
 # elif defined(__powerpc)
 #  define LE_FETCH32(a)	({ register unsigned int l;	\
-				asm volatile (		\
+				asm (			\
 				"lwbrx %0,0,%1"		\
 				: "=r"(l)		\
 				: "r"(a));		\
@@ -231,7 +242,7 @@
 
 # elif defined(__sparc) && defined(ULTRASPARC)
 #  define LE_FETCH32(a)	({ register unsigned int l;		\
-				asm volatile (			\
+				asm (				\
 				"lda [%1]#ASI_PRIMARY_LITTLE,%0"\
 				: "=r"(l)			\
 				: "r"(a));			\
@@ -399,8 +410,9 @@
  * Time for some action:-)
  */
 
-void HASH_UPDATE (HASH_CTX *c, const unsigned char *data, unsigned long len)
+void HASH_UPDATE (HASH_CTX *c, const void *data_, unsigned long len)
 	{
+	const unsigned char *data=data_;
 	register HASH_LONG * p;
 	register unsigned long l;
 	int sw,sc,ew,ec;
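With the parameter widened to const void *, callers can hash arbitrary objects without casting to unsigned char *, while the local data pointer restores byte-wise access inside the function. A hypothetical caller (the record type and function are illustrative only):

struct record { unsigned int id; char name[32]; };

static void digest_record(HASH_CTX *ctx, const struct record *r)
	{
	/* No (unsigned char *) cast needed against the void * prototype. */
	HASH_UPDATE(ctx, r, (unsigned long)sizeof(*r));
	}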
@@ -581,10 +593,11 @@ void HASH_FINAL (unsigned char *md, HASH_CTX *c)
 #endif
 	HASH_BLOCK_HOST_ORDER (c,p,1);
 
-	l=c->A; HOST_l2c(l,md);
-	l=c->B; HOST_l2c(l,md);
-	l=c->C; HOST_l2c(l,md);
-	l=c->D; HOST_l2c(l,md);
+#ifndef HASH_MAKE_STRING
+#error "HASH_MAKE_STRING must be defined!"
+#else
+	HASH_MAKE_STRING(c,md);
+#endif
 
 	c->num=0;
 	/* clear stuff, HASH_BLOCK may be leaving some stuff on the stack
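The #error guard makes the hook mandatory, and the hard-coded four-word copy disappears, so a digest with a different state layout simply supplies its own macro. A hypothetical five-word variant (RIPEMD-160-style members assumed) showing why the fixed A..D sequence could not stay:

#define HASH_MAKE_STRING(c,s)	do {	\
	unsigned long ll;		\
	ll=(c)->A; HOST_l2c(ll,(s));	\
	ll=(c)->B; HOST_l2c(ll,(s));	\
	ll=(c)->C; HOST_l2c(ll,(s));	\
	ll=(c)->D; HOST_l2c(ll,(s));	\
	ll=(c)->E; HOST_l2c(ll,(s));	\
	} while (0)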