path: root/src/lib/libcrypto/ripemd/ripemd.c
Diffstat (limited to 'src/lib/libcrypto/ripemd/ripemd.c')
-rw-r--r--  src/lib/libcrypto/ripemd/ripemd.c  441
1 file changed, 441 insertions, 0 deletions
diff --git a/src/lib/libcrypto/ripemd/ripemd.c b/src/lib/libcrypto/ripemd/ripemd.c
new file mode 100644
index 0000000000..4edf3de4d3
--- /dev/null
+++ b/src/lib/libcrypto/ripemd/ripemd.c
@@ -0,0 +1,441 @@
/* $OpenBSD: ripemd.c,v 1.1 2023/07/28 11:08:01 jsing Exp $ */
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */

#include <stdio.h>
#include <openssl/opensslv.h>
#include <openssl/crypto.h>

#include <stdlib.h>
#include <string.h>
#include <openssl/opensslconf.h>
#include <openssl/ripemd.h>

/*
 * DO EXAMINE COMMENTS IN crypto/md5/md5_locl.h & crypto/md5/md5_dgst.c
 * FOR EXPLANATIONS ON FOLLOWING "CODE."
 * <appro@fy.chalmers.se>
 */
#ifdef RMD160_ASM
# if defined(__i386) || defined(__i386__) || defined(_M_IX86) || defined(__INTEL__)
#  define ripemd160_block_data_order ripemd160_block_asm_data_order
# endif
#endif

__BEGIN_HIDDEN_DECLS

void ripemd160_block_data_order(RIPEMD160_CTX *c, const void *p, size_t num);

__END_HIDDEN_DECLS

#define DATA_ORDER_IS_LITTLE_ENDIAN

#define HASH_LONG		RIPEMD160_LONG
#define HASH_CTX		RIPEMD160_CTX
#define HASH_CBLOCK		RIPEMD160_CBLOCK
#define HASH_UPDATE		RIPEMD160_Update
#define HASH_TRANSFORM		RIPEMD160_Transform
#define HASH_FINAL		RIPEMD160_Final
#define HASH_MAKE_STRING(c,s)	do {	\
	unsigned long ll;		\
	ll=(c)->A; HOST_l2c(ll,(s));	\
	ll=(c)->B; HOST_l2c(ll,(s));	\
	ll=(c)->C; HOST_l2c(ll,(s));	\
	ll=(c)->D; HOST_l2c(ll,(s));	\
	ll=(c)->E; HOST_l2c(ll,(s));	\
	} while (0)
#define HASH_BLOCK_DATA_ORDER	ripemd160_block_data_order

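/*
 * md32_common.h is the shared 32-bit Merkle-Damgard skeleton: driven by the
 * HASH_* macros above, it supplies RIPEMD160_Update(), RIPEMD160_Transform()
 * and RIPEMD160_Final(), leaving only the block function to this file.
 */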
#include "md32_common.h"

#if 0
#define F1(x,y,z)	 ((x)^(y)^(z))
#define F2(x,y,z)	 (((x)&(y))|((~x)&z))
#define F3(x,y,z)	 (((x)|(~y))^(z))
#define F4(x,y,z)	 (((x)&(z))|((y)&(~(z))))
#define F5(x,y,z)	 ((x)^((y)|(~(z))))
#else
/*
 * Transformed F2 and F4 are courtesy of Wei Dai <weidai@eskimo.com>
 */
#define F1(x,y,z)	((x) ^ (y) ^ (z))
#define F2(x,y,z)	((((y) ^ (z)) & (x)) ^ (z))
#define F3(x,y,z)	(((~(y)) | (x)) ^ (z))
#define F4(x,y,z)	((((x) ^ (y)) & (z)) ^ (y))
#define F5(x,y,z)	(((~(z)) | (y)) ^ (x))
#endif
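/*
 * F1..F5 are the five RIPEMD-160 selection functions.  The left line applies
 * them in the order F1, F2, F3, F4, F5 (rounds 1-5 below); the parallel right
 * line applies them in the reverse order, F5 down to F1.  The transformed
 * F2/F4 compute the same values as the #if 0 variants using fewer operations.
 */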

#define RIPEMD160_A	0x67452301L
#define RIPEMD160_B	0xEFCDAB89L
#define RIPEMD160_C	0x98BADCFEL
#define RIPEMD160_D	0x10325476L
#define RIPEMD160_E	0xC3D2E1F0L

#include "rmdconst.h"

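/*
 * One RIPEMD-160 step: a = ROTL(a + Fn(b,c,d) + X[w] + K, s) + e, followed by
 * c = ROTL(c, 10).  RIP1 is the variant without a round constant (K = 0),
 * used in round 1 of the left line and round 5 of the right line.
 */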
#define RIP1(a,b,c,d,e,w,s) { \
	a+=F1(b,c,d)+X(w); \
	a=ROTATE(a,s)+e; \
	c=ROTATE(c,10); }

#define RIP2(a,b,c,d,e,w,s,K) { \
	a+=F2(b,c,d)+X(w)+K; \
	a=ROTATE(a,s)+e; \
	c=ROTATE(c,10); }

#define RIP3(a,b,c,d,e,w,s,K) { \
	a+=F3(b,c,d)+X(w)+K; \
	a=ROTATE(a,s)+e; \
	c=ROTATE(c,10); }

#define RIP4(a,b,c,d,e,w,s,K) { \
	a+=F4(b,c,d)+X(w)+K; \
	a=ROTATE(a,s)+e; \
	c=ROTATE(c,10); }

#define RIP5(a,b,c,d,e,w,s,K) { \
	a+=F5(b,c,d)+X(w)+K; \
	a=ROTATE(a,s)+e; \
	c=ROTATE(c,10); }

# ifdef RMD160_ASM
void ripemd160_block_x86(RIPEMD160_CTX *c, unsigned long *p, size_t num);
# define ripemd160_block ripemd160_block_x86
# else
void ripemd160_block(RIPEMD160_CTX *c, unsigned long *p, size_t num);
# endif

int
RIPEMD160_Init(RIPEMD160_CTX *c)
{
	memset(c, 0, sizeof(*c));
	c->A = RIPEMD160_A;
	c->B = RIPEMD160_B;
	c->C = RIPEMD160_C;
	c->D = RIPEMD160_D;
	c->E = RIPEMD160_E;
	return 1;
}

#ifndef ripemd160_block_data_order
#ifdef X
#undef X
#endif
void
ripemd160_block_data_order(RIPEMD160_CTX *ctx, const void *p, size_t num)
{
	const unsigned char *data = p;
	unsigned MD32_REG_T A, B, C, D, E;
	unsigned MD32_REG_T a, b, c, d, e, l;
#ifndef MD32_XARRAY
	/* See comment in crypto/sha/sha_locl.h for details. */
	unsigned MD32_REG_T XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
	    XX8, XX9, XX10, XX11, XX12, XX13, XX14, XX15;
# define X(i)	XX##i
#else
	RIPEMD160_LONG XX[16];
# define X(i)	XX[i]
#endif

	for (; num--; ) {

		A = ctx->A;
		B = ctx->B;
		C = ctx->C;
		D = ctx->D;
		E = ctx->E;

		HOST_c2l(data, l);
		X( 0) = l;
		HOST_c2l(data, l);
		X( 1) = l;
		RIP1(A, B,C, D,E, WL00, SL00);
		HOST_c2l(data, l);
		X( 2) = l;
		RIP1(E, A,B, C,D, WL01, SL01);
		HOST_c2l(data, l);
		X( 3) = l;
		RIP1(D, E,A, B,C, WL02, SL02);
		HOST_c2l(data, l);
		X( 4) = l;
		RIP1(C, D,E, A,B, WL03, SL03);
		HOST_c2l(data, l);
		X( 5) = l;
		RIP1(B, C,D, E,A, WL04, SL04);
		HOST_c2l(data, l);
		X( 6) = l;
		RIP1(A, B,C, D,E, WL05, SL05);
		HOST_c2l(data, l);
		X( 7) = l;
		RIP1(E, A,B, C,D, WL06, SL06);
		HOST_c2l(data, l);
		X( 8) = l;
		RIP1(D, E,A, B,C, WL07, SL07);
		HOST_c2l(data, l);
		X( 9) = l;
		RIP1(C, D,E, A,B, WL08, SL08);
		HOST_c2l(data, l);
		X(10) = l;
		RIP1(B, C,D, E,A, WL09, SL09);
		HOST_c2l(data, l);
		X(11) = l;
		RIP1(A, B,C, D,E, WL10, SL10);
		HOST_c2l(data, l);
		X(12) = l;
		RIP1(E, A,B, C,D, WL11, SL11);
		HOST_c2l(data, l);
		X(13) = l;
		RIP1(D, E,A, B,C, WL12, SL12);
		HOST_c2l(data, l);
		X(14) = l;
		RIP1(C, D,E, A,B, WL13, SL13);
		HOST_c2l(data, l);
		X(15) = l;
		RIP1(B, C,D, E,A, WL14, SL14);
		RIP1(A, B,C, D,E, WL15, SL15);

		RIP2(E, A,B, C,D, WL16, SL16, KL1);
		RIP2(D, E,A, B,C, WL17, SL17, KL1);
		RIP2(C, D,E, A,B, WL18, SL18, KL1);
		RIP2(B, C,D, E,A, WL19, SL19, KL1);
		RIP2(A, B,C, D,E, WL20, SL20, KL1);
		RIP2(E, A,B, C,D, WL21, SL21, KL1);
		RIP2(D, E,A, B,C, WL22, SL22, KL1);
		RIP2(C, D,E, A,B, WL23, SL23, KL1);
		RIP2(B, C,D, E,A, WL24, SL24, KL1);
		RIP2(A, B,C, D,E, WL25, SL25, KL1);
		RIP2(E, A,B, C,D, WL26, SL26, KL1);
		RIP2(D, E,A, B,C, WL27, SL27, KL1);
		RIP2(C, D,E, A,B, WL28, SL28, KL1);
		RIP2(B, C,D, E,A, WL29, SL29, KL1);
		RIP2(A, B,C, D,E, WL30, SL30, KL1);
		RIP2(E, A,B, C,D, WL31, SL31, KL1);

		RIP3(D, E,A, B,C, WL32, SL32, KL2);
		RIP3(C, D,E, A,B, WL33, SL33, KL2);
		RIP3(B, C,D, E,A, WL34, SL34, KL2);
		RIP3(A, B,C, D,E, WL35, SL35, KL2);
		RIP3(E, A,B, C,D, WL36, SL36, KL2);
		RIP3(D, E,A, B,C, WL37, SL37, KL2);
		RIP3(C, D,E, A,B, WL38, SL38, KL2);
		RIP3(B, C,D, E,A, WL39, SL39, KL2);
		RIP3(A, B,C, D,E, WL40, SL40, KL2);
		RIP3(E, A,B, C,D, WL41, SL41, KL2);
		RIP3(D, E,A, B,C, WL42, SL42, KL2);
		RIP3(C, D,E, A,B, WL43, SL43, KL2);
		RIP3(B, C,D, E,A, WL44, SL44, KL2);
		RIP3(A, B,C, D,E, WL45, SL45, KL2);
		RIP3(E, A,B, C,D, WL46, SL46, KL2);
		RIP3(D, E,A, B,C, WL47, SL47, KL2);

		RIP4(C, D,E, A,B, WL48, SL48, KL3);
		RIP4(B, C,D, E,A, WL49, SL49, KL3);
		RIP4(A, B,C, D,E, WL50, SL50, KL3);
		RIP4(E, A,B, C,D, WL51, SL51, KL3);
		RIP4(D, E,A, B,C, WL52, SL52, KL3);
		RIP4(C, D,E, A,B, WL53, SL53, KL3);
		RIP4(B, C,D, E,A, WL54, SL54, KL3);
		RIP4(A, B,C, D,E, WL55, SL55, KL3);
		RIP4(E, A,B, C,D, WL56, SL56, KL3);
		RIP4(D, E,A, B,C, WL57, SL57, KL3);
		RIP4(C, D,E, A,B, WL58, SL58, KL3);
		RIP4(B, C,D, E,A, WL59, SL59, KL3);
		RIP4(A, B,C, D,E, WL60, SL60, KL3);
		RIP4(E, A,B, C,D, WL61, SL61, KL3);
		RIP4(D, E,A, B,C, WL62, SL62, KL3);
		RIP4(C, D,E, A,B, WL63, SL63, KL3);

		RIP5(B, C,D, E,A, WL64, SL64, KL4);
		RIP5(A, B,C, D,E, WL65, SL65, KL4);
		RIP5(E, A,B, C,D, WL66, SL66, KL4);
		RIP5(D, E,A, B,C, WL67, SL67, KL4);
		RIP5(C, D,E, A,B, WL68, SL68, KL4);
		RIP5(B, C,D, E,A, WL69, SL69, KL4);
		RIP5(A, B,C, D,E, WL70, SL70, KL4);
		RIP5(E, A,B, C,D, WL71, SL71, KL4);
		RIP5(D, E,A, B,C, WL72, SL72, KL4);
		RIP5(C, D,E, A,B, WL73, SL73, KL4);
		RIP5(B, C,D, E,A, WL74, SL74, KL4);
		RIP5(A, B,C, D,E, WL75, SL75, KL4);
		RIP5(E, A,B, C,D, WL76, SL76, KL4);
		RIP5(D, E,A, B,C, WL77, SL77, KL4);
		RIP5(C, D,E, A,B, WL78, SL78, KL4);
		RIP5(B, C,D, E,A, WL79, SL79, KL4);

		a = A;
		b = B;
		c = C;
		d = D;
		e = E;
		/* Do other half */
		A = ctx->A;
		B = ctx->B;
		C = ctx->C;
		D = ctx->D;
		E = ctx->E;

		RIP5(A, B,C, D,E, WR00, SR00, KR0);
		RIP5(E, A,B, C,D, WR01, SR01, KR0);
		RIP5(D, E,A, B,C, WR02, SR02, KR0);
		RIP5(C, D,E, A,B, WR03, SR03, KR0);
		RIP5(B, C,D, E,A, WR04, SR04, KR0);
		RIP5(A, B,C, D,E, WR05, SR05, KR0);
		RIP5(E, A,B, C,D, WR06, SR06, KR0);
		RIP5(D, E,A, B,C, WR07, SR07, KR0);
		RIP5(C, D,E, A,B, WR08, SR08, KR0);
		RIP5(B, C,D, E,A, WR09, SR09, KR0);
		RIP5(A, B,C, D,E, WR10, SR10, KR0);
		RIP5(E, A,B, C,D, WR11, SR11, KR0);
		RIP5(D, E,A, B,C, WR12, SR12, KR0);
		RIP5(C, D,E, A,B, WR13, SR13, KR0);
		RIP5(B, C,D, E,A, WR14, SR14, KR0);
		RIP5(A, B,C, D,E, WR15, SR15, KR0);

		RIP4(E, A,B, C,D, WR16, SR16, KR1);
		RIP4(D, E,A, B,C, WR17, SR17, KR1);
		RIP4(C, D,E, A,B, WR18, SR18, KR1);
		RIP4(B, C,D, E,A, WR19, SR19, KR1);
		RIP4(A, B,C, D,E, WR20, SR20, KR1);
		RIP4(E, A,B, C,D, WR21, SR21, KR1);
		RIP4(D, E,A, B,C, WR22, SR22, KR1);
		RIP4(C, D,E, A,B, WR23, SR23, KR1);
		RIP4(B, C,D, E,A, WR24, SR24, KR1);
		RIP4(A, B,C, D,E, WR25, SR25, KR1);
		RIP4(E, A,B, C,D, WR26, SR26, KR1);
		RIP4(D, E,A, B,C, WR27, SR27, KR1);
		RIP4(C, D,E, A,B, WR28, SR28, KR1);
		RIP4(B, C,D, E,A, WR29, SR29, KR1);
		RIP4(A, B,C, D,E, WR30, SR30, KR1);
		RIP4(E, A,B, C,D, WR31, SR31, KR1);

		RIP3(D, E,A, B,C, WR32, SR32, KR2);
		RIP3(C, D,E, A,B, WR33, SR33, KR2);
		RIP3(B, C,D, E,A, WR34, SR34, KR2);
		RIP3(A, B,C, D,E, WR35, SR35, KR2);
		RIP3(E, A,B, C,D, WR36, SR36, KR2);
		RIP3(D, E,A, B,C, WR37, SR37, KR2);
		RIP3(C, D,E, A,B, WR38, SR38, KR2);
		RIP3(B, C,D, E,A, WR39, SR39, KR2);
		RIP3(A, B,C, D,E, WR40, SR40, KR2);
		RIP3(E, A,B, C,D, WR41, SR41, KR2);
		RIP3(D, E,A, B,C, WR42, SR42, KR2);
		RIP3(C, D,E, A,B, WR43, SR43, KR2);
		RIP3(B, C,D, E,A, WR44, SR44, KR2);
		RIP3(A, B,C, D,E, WR45, SR45, KR2);
		RIP3(E, A,B, C,D, WR46, SR46, KR2);
		RIP3(D, E,A, B,C, WR47, SR47, KR2);

		RIP2(C, D,E, A,B, WR48, SR48, KR3);
		RIP2(B, C,D, E,A, WR49, SR49, KR3);
		RIP2(A, B,C, D,E, WR50, SR50, KR3);
		RIP2(E, A,B, C,D, WR51, SR51, KR3);
		RIP2(D, E,A, B,C, WR52, SR52, KR3);
		RIP2(C, D,E, A,B, WR53, SR53, KR3);
		RIP2(B, C,D, E,A, WR54, SR54, KR3);
		RIP2(A, B,C, D,E, WR55, SR55, KR3);
		RIP2(E, A,B, C,D, WR56, SR56, KR3);
		RIP2(D, E,A, B,C, WR57, SR57, KR3);
		RIP2(C, D,E, A,B, WR58, SR58, KR3);
		RIP2(B, C,D, E,A, WR59, SR59, KR3);
		RIP2(A, B,C, D,E, WR60, SR60, KR3);
		RIP2(E, A,B, C,D, WR61, SR61, KR3);
		RIP2(D, E,A, B,C, WR62, SR62, KR3);
		RIP2(C, D,E, A,B, WR63, SR63, KR3);

		RIP1(B, C,D, E,A, WR64, SR64);
		RIP1(A, B,C, D,E, WR65, SR65);
		RIP1(E, A,B, C,D, WR66, SR66);
		RIP1(D, E,A, B,C, WR67, SR67);
		RIP1(C, D,E, A,B, WR68, SR68);
		RIP1(B, C,D, E,A, WR69, SR69);
		RIP1(A, B,C, D,E, WR70, SR70);
		RIP1(E, A,B, C,D, WR71, SR71);
		RIP1(D, E,A, B,C, WR72, SR72);
		RIP1(C, D,E, A,B, WR73, SR73);
		RIP1(B, C,D, E,A, WR74, SR74);
		RIP1(A, B,C, D,E, WR75, SR75);
		RIP1(E, A,B, C,D, WR76, SR76);
		RIP1(D, E,A, B,C, WR77, SR77);
		RIP1(C, D,E, A,B, WR78, SR78);
		RIP1(B, C,D, E,A, WR79, SR79);

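		/*
		 * Fold both halves into the chaining value: each new word is
		 * the old value of the next word (cyclically) plus one word
		 * from the left line (a..e) and one from the right line
		 * (A..E).  D is reused as a temporary so that ctx->A is still
		 * available for the last sum.
		 */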
		D = ctx->B + c + D;
		ctx->B = ctx->C + d + E;
		ctx->C = ctx->D + e + A;
		ctx->D = ctx->E + a + B;
		ctx->E = ctx->A + b + C;
		ctx->A = D;

	}
}
#endif

unsigned char *
RIPEMD160(const unsigned char *d, size_t n,
    unsigned char *md)
{
	RIPEMD160_CTX c;
	static unsigned char m[RIPEMD160_DIGEST_LENGTH];

	if (md == NULL)
		md = m;
	if (!RIPEMD160_Init(&c))
		return NULL;
	RIPEMD160_Update(&c, d, n);
	RIPEMD160_Final(md, &c);
	explicit_bzero(&c, sizeof(c));
	return (md);
}
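
For reference, a minimal caller sketch (not part of the diff): it assumes a
libcrypto that installs <openssl/ripemd.h> and is linked with -lcrypto, and
exercises both the one-shot RIPEMD160() entry point defined above and the
streaming Init/Update/Final interface generated via md32_common.h, which must
agree on the 20-byte digest.

#include <stdio.h>
#include <string.h>

#include <openssl/ripemd.h>

int
main(void)
{
	const char *msg = "abc";
	unsigned char md1[RIPEMD160_DIGEST_LENGTH];
	unsigned char md2[RIPEMD160_DIGEST_LENGTH];
	RIPEMD160_CTX ctx;
	int i;

	/* One-shot interface: hash the whole buffer in a single call. */
	RIPEMD160((const unsigned char *)msg, strlen(msg), md1);

	/* Streaming interface: feed the data in arbitrary chunks. */
	if (!RIPEMD160_Init(&ctx))
		return 1;
	RIPEMD160_Update(&ctx, msg, 1);
	RIPEMD160_Update(&ctx, msg + 1, strlen(msg) - 1);
	RIPEMD160_Final(md2, &ctx);

	/* Both paths must produce the same 20-byte digest. */
	if (memcmp(md1, md2, sizeof(md1)) != 0)
		return 1;

	/* Known answer: RIPEMD-160("abc") is 8eb208f7...f15a0bfc. */
	for (i = 0; i < RIPEMD160_DIGEST_LENGTH; i++)
		printf("%02x", md1[i]);
	printf("\n");

	return 0;
}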