author    Igor Pavlov <87184205+ip7z@users.noreply.github.com>  2026-02-12 00:00:00 +0000
committer Igor Pavlov <87184205+ip7z@users.noreply.github.com>  2026-02-12 17:38:49 +0500
commit    839151eaaad24771892afaae6bac690e31e58384
tree      e292449d621f7a1d503b975984a2aca240dd2d8f
parent    5e96a8279489832924056b1fa82f29d5837c9469
download  7zip-26.00.tar.gz, 7zip-26.00.tar.bz2, 7zip-26.00.zip
Diffstat (limited to 'C/Xxh64.c')

 C/Xxh64.c | 98
 1 file changed, 80 insertions(+), 18 deletions(-)
diff --git a/C/Xxh64.c b/C/Xxh64.c
index dc02a02..660e0be 100644
--- a/C/Xxh64.c
+++ b/C/Xxh64.c
@@ -1,6 +1,6 @@
 /* Xxh64.c -- XXH64 hash calculation
 original code: Copyright (c) Yann Collet.
-2023-08-18 : modified by Igor Pavlov.
+modified by Igor Pavlov.
 This source code is licensed under BSD 2-Clause License.
 */

@@ -27,6 +27,14 @@ void Xxh64State_Init(CXxh64State *p)

 #if !defined(MY_CPU_64BIT) && defined(MY_CPU_X86) && defined(_MSC_VER)
   #define Z7_XXH64_USE_ASM
+#elif !defined(MY_CPU_LE_UNALIGN_64) // && defined (MY_CPU_LE)
+  #define Z7_XXH64_USE_ALIGNED
+#endif
+
+#ifdef Z7_XXH64_USE_ALIGNED
+  #define Xxh64State_UpdateBlocks_Unaligned_Select  Xxh64State_UpdateBlocks_Unaligned
+#else
+  #define Xxh64State_UpdateBlocks_Unaligned_Select  Xxh64State_UpdateBlocks
+#endif

 #if !defined(MY_CPU_64BIT) && defined(MY_CPU_X86) \
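
[Note] The MSVC x86 build keeps its assembly path (Z7_XXH64_USE_ASM); the new Z7_XXH64_USE_ALIGNED case covers targets where CpuArch.h does not define MY_CPU_LE_UNALIGN_64, i.e. little-endian CPUs without safe unaligned 64-bit loads. The sketch below shows roughly what the two load primitives used later in the diff (GetUi64 and GetUi64a from CpuArch.h) amount to on such a target; the sketch_ names are illustrative placeholders, not the real definitions.

#include <stdint.h>
#include <string.h>

/* Alignment-safe little-endian 64-bit read: memcpy is legal at any
   address, and compilers fold it into a single load where the CPU
   allows unaligned access. */
static uint64_t sketch_GetUi64(const void *p)
{
  uint64_t v;
  memcpy(&v, p, 8);
  return v;
}

/* Aligned variant: the caller guarantees 8-byte alignment, so a
   direct dereference is legal and avoids the byte-assembly fallback
   that memcpy becomes on strict-alignment CPUs. */
static uint64_t sketch_GetUi64a(const void *p)
{
  return *(const uint64_t *)p;
}
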
@@ -188,32 +196,76 @@ Xxh64State_UpdateBlocks(CXxh64State *p, const void *data, const void *end)

 #else

+#ifdef Z7_XXH64_USE_ALIGNED
+static
+#endif
 void
 Z7_NO_INLINE
 Z7_FASTCALL
-Xxh64State_UpdateBlocks(CXxh64State *p, const void *_data, const void *end)
+Xxh64State_UpdateBlocks_Unaligned_Select(CXxh64State *p, const void *_data, const void *end)
 {
   const Byte *data = (const Byte *)_data;
-  UInt64 v[4];
-  v[0] = p->v[0];
-  v[1] = p->v[1];
-  v[2] = p->v[2];
-  v[3] = p->v[3];
+  UInt64 v0, v1, v2, v3;
+  v0 = p->v[0];
+  v1 = p->v[1];
+  v2 = p->v[2];
+  v3 = p->v[3];
   do
   {
-    v[0] = Xxh64_Round(v[0], GetUi64(data));  data += 8;
-    v[1] = Xxh64_Round(v[1], GetUi64(data));  data += 8;
-    v[2] = Xxh64_Round(v[2], GetUi64(data));  data += 8;
-    v[3] = Xxh64_Round(v[3], GetUi64(data));  data += 8;
+    v0 = Xxh64_Round(v0, GetUi64(data));  data += 8;
+    v1 = Xxh64_Round(v1, GetUi64(data));  data += 8;
+    v2 = Xxh64_Round(v2, GetUi64(data));  data += 8;
+    v3 = Xxh64_Round(v3, GetUi64(data));  data += 8;
   }
   while (data != end);
-  p->v[0] = v[0];
-  p->v[1] = v[1];
-  p->v[2] = v[2];
-  p->v[3] = v[3];
+  p->v[0] = v0;
+  p->v[1] = v1;
+  p->v[2] = v2;
+  p->v[3] = v3;
 }

-#endif
+
+#ifdef Z7_XXH64_USE_ALIGNED
+
+static
+void
+Z7_NO_INLINE
+Z7_FASTCALL
+Xxh64State_UpdateBlocks_Aligned(CXxh64State *p, const void *_data, const void *end)
+{
+  const Byte *data = (const Byte *)_data;
+  UInt64 v0, v1, v2, v3;
+  v0 = p->v[0];
+  v1 = p->v[1];
+  v2 = p->v[2];
+  v3 = p->v[3];
+  do
+  {
+    v0 = Xxh64_Round(v0, GetUi64a(data));  data += 8;
+    v1 = Xxh64_Round(v1, GetUi64a(data));  data += 8;
+    v2 = Xxh64_Round(v2, GetUi64a(data));  data += 8;
+    v3 = Xxh64_Round(v3, GetUi64a(data));  data += 8;
+  }
+  while (data != end);
+  p->v[0] = v0;
+  p->v[1] = v1;
+  p->v[2] = v2;
+  p->v[3] = v3;
+}
+
+void
+Z7_NO_INLINE
+Z7_FASTCALL
+Xxh64State_UpdateBlocks(CXxh64State *p, const void *data, const void *end)
+{
+  if (((unsigned)(ptrdiff_t)data & 7) == 0)
+    Xxh64State_UpdateBlocks_Aligned(p, data, end);
+  else
+    Xxh64State_UpdateBlocks_Unaligned(p, data, end);
+}
+
+#endif // Z7_XXH64_USE_ALIGNED
+#endif // Z7_XXH64_USE_ASM

 UInt64 Xxh64State_Digest(const CXxh64State *p, const void *_data, UInt64 count)
 {
@@ -306,12 +358,22 @@ void Xxh64_Update(CXxh64 *p, const void *_data, size_t size)
     while (--rem);
     if (cnt != 32)
       return;
-    Xxh64State_UpdateBlocks(&p->state, p->buf64, &p->buf64[4]);
+#ifdef Z7_XXH64_USE_ALIGNED
+    Xxh64State_UpdateBlocks_Aligned
+#else
+    Xxh64State_UpdateBlocks_Unaligned_Select
+#endif
+      (&p->state, p->buf64, &p->buf64[4]);
   }

   if (size &= ~(size_t)31)
   {
+#ifdef Z7_XXH64_USE_ALIGNED
+    if (((unsigned)(ptrdiff_t)data & 7) == 0)
+      Xxh64State_UpdateBlocks_Aligned(&p->state, data, data + size);
+    else
+#endif
+    Xxh64State_UpdateBlocks_Unaligned_Select(&p->state, data, data + size);
     data += size;
   }

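
[Note] In Xxh64_Update, the internal block buffer p->buf64 is an array of UInt64, so it is always 8-byte aligned and the buffered-block flush can call the aligned loop unconditionally; only the caller-supplied data pointer needs a runtime test. A minimal self-contained sketch of that dispatch pattern (placeholder names, not the 7-Zip API; uintptr_t stands in for the diff's (unsigned)(ptrdiff_t) cast):

#include <stdint.h>

typedef void (*BlockFn)(void *state, const void *data, const void *end);

/* Choose the aligned loop when the pointer is 8-byte aligned,
   fall back to the alignment-safe loop otherwise. */
static void sketch_UpdateBlocks_Dispatch(void *state, const void *data, const void *end,
    BlockFn aligned_fn, BlockFn unaligned_fn)
{
  if (((uintptr_t)data & 7) == 0)
    aligned_fn(state, data, end);
  else
    unaligned_fn(state, data, end);
}
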