// CPU feature detection: on these targets unaligned memory reads are both
// legal and fast, so values can be loaded directly from the input buffer.
#if defined(__ARM_FEATURE_UNALIGNED) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
#define XXH_USE_UNALIGNED_ACCESS 1
#endif

// XXH_FORCE_NATIVE_FORMAT==0 (default) keeps the canonical little-endian
// result on every platform; 1 would return the faster native-endian value.
#define XXH_FORCE_NATIVE_FORMAT 0

#ifdef _MSC_VER // Visual Studio
#pragma warning(disable : 4127) // disable: C4127: conditional expression is constant
#endif

#ifdef _MSC_VER // Visual Studio
#define FORCE_INLINE static __forceinline
#else
#ifdef __GNUC__
#define FORCE_INLINE static inline __attribute__((always_inline))
#else
#define FORCE_INLINE static inline
#endif
#endif

// Basic integer types: use <stdint.h> when C99 is available, otherwise fall
// back to the conventional widths of the base types.
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L // C99
#include <stdint.h>
typedef uint8_t BYTE;
typedef uint16_t U16;
typedef uint32_t U32;
typedef int32_t S32;
typedef uint64_t U64;
#else
typedef unsigned char BYTE;
typedef unsigned short U16;
typedef unsigned int U32;
typedef signed int S32;
typedef unsigned long long U64;
#endif

// Wrap U32/U64 in packed single-member structs so that A32()/A64() below can
// express a possibly-unaligned load without relying on pointer casts alone.
#if defined(__GNUC__) && !defined(XXH_USE_UNALIGNED_ACCESS)
#define _PACKED __attribute__ ((packed))
#else
#define _PACKED
#endif

#if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
#pragma pack(push, 1)
#endif

typedef struct _U32_S {
	U32 v;
} _PACKED U32_S;
typedef struct _U64_S {
	U64 v;
} _PACKED U64_S;

#if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
#pragma pack(pop)
#endif

// Possibly-unaligned 32/64-bit loads through the packed wrappers.
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)

#define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)

// Rotate-left helpers: MSVC has intrinsics; elsewhere the two-shift form is
// recognized by optimizing compilers and compiled to a single rotate.
#if defined(_MSC_VER)
#define XXH_rotl32(x,r) _rotl(x,r)
#define XXH_rotl64(x,r) _rotl64(x,r)
#else
#define XXH_rotl32(x,r) ((x << r) | (x >> (32 - r)))
#define XXH_rotl64(x,r) ((x << r) | (x >> (64 - r)))
#endif

// Byte-swap helpers, used to read little-endian data on big-endian CPUs.
// Prefer compiler intrinsics; fall back to portable mask-and-shift code.
#if defined(_MSC_VER) // Visual Studio
#define XXH_swap32 _byteswap_ulong
#define XXH_swap64 _byteswap_uint64
#elif GCC_VERSION >= 403
#define XXH_swap32 __builtin_bswap32
#define XXH_swap64 __builtin_bswap64
#else
static inline U32 XXH_swap32(U32 x)
{
	return ((x << 24) & 0xff000000) |
	    ((x << 8) & 0x00ff0000) |
	    ((x >> 8) & 0x0000ff00) | ((x >> 24) & 0x000000ff);
}

static inline U64 XXH_swap64(U64 x)
{
	return ((x << 56) & 0xff00000000000000ULL) |
	    ((x << 40) & 0x00ff000000000000ULL) |
	    ((x << 24) & 0x0000ff0000000000ULL) |
	    ((x << 8) & 0x000000ff00000000ULL) |
	    ((x >> 8) & 0x00000000ff000000ULL) |
	    ((x >> 24) & 0x0000000000ff0000ULL) |
	    ((x >> 40) & 0x000000000000ff00ULL) |
	    ((x >> 56) & 0x00000000000000ffULL);
}
#endif
// Pseudo-random primes used by the XXH32 mixing steps.
#define PRIME32_1 2654435761U
#define PRIME32_2 2246822519U
#define PRIME32_3 3266489917U
#define PRIME32_4 668265263U
#define PRIME32_5 374761393U

// Pseudo-random primes used by the XXH64 mixing steps.
#define PRIME64_1 11400714785074694791ULL
#define PRIME64_2 14029467366897019727ULL
#define PRIME64_3 1609587929392839161ULL
#define PRIME64_4 9650029242287828579ULL
#define PRIME64_5 2870177450012600261ULL

// Architecture endianness; resolved at run time via XXH_CPU_LITTLE_ENDIAN.
typedef enum { XXH_bigEndian = 0, XXH_littleEndian = 1 } XXH_endianess;
#ifndef XXH_CPU_LITTLE_ENDIAN // It is possible to define XXH_CPU_LITTLE_ENDIAN externally, for example using a compiler switch
// Run-time endianness probe: on a little-endian CPU the low-order byte of the
// integer 1 is stored first, so reading the first byte yields 1.
static const int one = 1;
#define XXH_CPU_LITTLE_ENDIAN (*(char*)(&one))
#endif

// Compile-time check: a false condition forces a division by zero error.
#define XXH_STATIC_ASSERT(c) { enum { XXH_static_assert = 1/(!!(c)) }; } // use only *after* variable declarations

// Whether the caller guarantees the input pointer is suitably aligned for
// direct U32/U64 loads (XXH_aligned) or not (XXH_unaligned).
typedef enum { XXH_aligned, XXH_unaligned } XXH_alignment;
225 FORCE_INLINE U32 XXH_readLE32_align(
const void *ptr, XXH_endianess endian,
228 if (align == XXH_unaligned)
230 XXH_littleEndian ? A32(ptr) : XXH_swap32(A32(ptr));
233 XXH_littleEndian ? *(U32 *) ptr : XXH_swap32(*(U32 *) ptr);
236 FORCE_INLINE U32 XXH_readLE32(
const void *ptr, XXH_endianess endian)
238 return XXH_readLE32_align(ptr, endian, XXH_unaligned);
241 FORCE_INLINE U64 XXH_readLE64_align(
const void *ptr, XXH_endianess endian,
244 if (align == XXH_unaligned)
246 XXH_littleEndian ? A64(ptr) : XXH_swap64(A64(ptr));
249 XXH_littleEndian ? *(U64 *) ptr : XXH_swap64(*(U64 *) ptr);
252 FORCE_INLINE U64 XXH_readLE64(
const void *ptr, XXH_endianess endian)
254 return XXH_readLE64_align(ptr, endian, XXH_unaligned);
260 FORCE_INLINE U32 XXH32_endian_align(
const void *input,
size_t len, U32 seed,
261 XXH_endianess endian, XXH_alignment align)
263 const BYTE *p = (
const BYTE *)input;
264 const BYTE *bEnd = p + len;
266 #define XXH_get32bits(p) XXH_readLE32_align(p, endian, align) 268 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER 271 bEnd = p = (
const BYTE *)(
size_t)16;
276 const BYTE *
const limit = bEnd - 16;
277 U32 v1 = seed + PRIME32_1 + PRIME32_2;
278 U32 v2 = seed + PRIME32_2;
280 U32 v4 = seed - PRIME32_1;
283 v1 += XXH_get32bits(p) * PRIME32_2;
284 v1 = XXH_rotl32(v1, 13);
287 v2 += XXH_get32bits(p) * PRIME32_2;
288 v2 = XXH_rotl32(v2, 13);
291 v3 += XXH_get32bits(p) * PRIME32_2;
292 v3 = XXH_rotl32(v3, 13);
295 v4 += XXH_get32bits(p) * PRIME32_2;
296 v4 = XXH_rotl32(v4, 13);
303 XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7) + XXH_rotl32(v3,
307 h32 = seed + PRIME32_5;
312 while (p + 4 <= bEnd) {
313 h32 += XXH_get32bits(p) * PRIME32_3;
314 h32 = XXH_rotl32(h32, 17) * PRIME32_4;
319 h32 += (*p) * PRIME32_5;
320 h32 = XXH_rotl32(h32, 11) * PRIME32_1;
333 unsigned int XXH32(
const void *input,
size_t len,
unsigned seed)
338 XXH32_reset(&state, seed);
339 XXH32_update(&state, input, len);
340 return XXH32_digest(&state);
342 XXH_endianess endian_detected = (XXH_endianess) XXH_CPU_LITTLE_ENDIAN;
344 #if !defined(XXH_USE_UNALIGNED_ACCESS) 345 if ((((
size_t)input) & 3) == 0)
347 if ((endian_detected == XXH_littleEndian)
348 || XXH_FORCE_NATIVE_FORMAT)
349 return XXH32_endian_align(input, len, seed,
353 return XXH32_endian_align(input, len, seed,
354 XXH_bigEndian, XXH_aligned);
358 if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
359 return XXH32_endian_align(input, len, seed, XXH_littleEndian,
362 return XXH32_endian_align(input, len, seed, XXH_bigEndian,
367 FORCE_INLINE U64 XXH64_endian_align(
const void *input,
size_t len, U64 seed,
368 XXH_endianess endian, XXH_alignment align)
370 const BYTE *p = (
const BYTE *)input;
371 const BYTE *bEnd = p + len;
373 #define XXH_get64bits(p) XXH_readLE64_align(p, endian, align) 375 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER 378 bEnd = p = (
const BYTE *)(
size_t)32;
383 const BYTE *
const limit = bEnd - 32;
384 U64 v1 = seed + PRIME64_1 + PRIME64_2;
385 U64 v2 = seed + PRIME64_2;
387 U64 v4 = seed - PRIME64_1;
390 v1 += XXH_get64bits(p) * PRIME64_2;
392 v1 = XXH_rotl64(v1, 31);
394 v2 += XXH_get64bits(p) * PRIME64_2;
396 v2 = XXH_rotl64(v2, 31);
398 v3 += XXH_get64bits(p) * PRIME64_2;
400 v3 = XXH_rotl64(v3, 31);
402 v4 += XXH_get64bits(p) * PRIME64_2;
404 v4 = XXH_rotl64(v4, 31);
410 XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3,
415 v1 = XXH_rotl64(v1, 31);
418 h64 = h64 * PRIME64_1 + PRIME64_4;
421 v2 = XXH_rotl64(v2, 31);
424 h64 = h64 * PRIME64_1 + PRIME64_4;
427 v3 = XXH_rotl64(v3, 31);
430 h64 = h64 * PRIME64_1 + PRIME64_4;
433 v4 = XXH_rotl64(v4, 31);
436 h64 = h64 * PRIME64_1 + PRIME64_4;
438 h64 = seed + PRIME64_5;
443 while (p + 8 <= bEnd) {
444 U64 k1 = XXH_get64bits(p);
446 k1 = XXH_rotl64(k1, 31);
449 h64 = XXH_rotl64(h64, 27) * PRIME64_1 + PRIME64_4;
454 h64 ^= (U64) (XXH_get32bits(p)) * PRIME64_1;
455 h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
460 h64 ^= (*p) * PRIME64_5;
461 h64 = XXH_rotl64(h64, 11) * PRIME64_1;
474 unsigned long long XXH64(
const void *input,
size_t len,
unsigned long long seed)
479 XXH64_reset(&state, seed);
480 XXH64_update(&state, input, len);
481 return XXH64_digest(&state);
483 XXH_endianess endian_detected = (XXH_endianess) XXH_CPU_LITTLE_ENDIAN;
485 #if !defined(XXH_USE_UNALIGNED_ACCESS) 486 if ((((
size_t)input) & 7) == 0)
488 if ((endian_detected == XXH_littleEndian)
489 || XXH_FORCE_NATIVE_FORMAT)
490 return XXH64_endian_align(input, len, seed,
494 return XXH64_endian_align(input, len, seed,
495 XXH_bigEndian, XXH_aligned);
499 if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
500 return XXH64_endian_align(input, len, seed, XXH_littleEndian,
503 return XXH64_endian_align(input, len, seed, XXH_bigEndian,