/*
xxHash - Fast Hash algorithm
Copyright (C) 2012-2014, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

You can contact the author at :
- xxHash source repository : http://code.google.com/p/xxhash/
- public discussion board : https://groups.google.com/forum/#!forum/lz4c
*/


//**************************************
// Tuning parameters
//**************************************
// XXH_USE_UNALIGNED_ACCESS :
// Unaligned memory access is automatically enabled for "common" CPUs, such as x86.
// For other CPUs, the compiler is more cautious and inserts extra code to ensure aligned access is respected.
// If you know your target CPU supports unaligned memory access, you may want to force this option manually to improve performance.
// You can also enable this parameter if you know your input data will always be aligned (on boundaries of 4, for U32).
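// Illustrative note (not part of the original tuning logic) : to force the
// option manually when the auto-detection below does not cover your target,
// one might pass -DXXH_USE_UNALIGNED_ACCESS=1 on the compiler command line,
// or define it here, before the detection block :
// #define XXH_USE_UNALIGNED_ACCESS 1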
#if defined(__ARM_FEATURE_UNALIGNED) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
#  define XXH_USE_UNALIGNED_ACCESS 1
#endif

// XXH_ACCEPT_NULL_INPUT_POINTER :
// If the input pointer is a null pointer, the default behavior of xxHash is to trigger a memory access error, since it is a bad pointer.
// When this option is enabled, xxHash output for a null input pointer is the same as for a zero-length input.
// This option has a very small performance cost (only measurable on small inputs).
// By default, this option is disabled. To enable it, uncomment the define below :
// #define XXH_ACCEPT_NULL_INPUT_POINTER 1

// XXH_FORCE_NATIVE_FORMAT :
// By default, the xxHash library provides endian-independent hash values, based on the little-endian convention.
// Results are therefore identical for little-endian and big-endian CPUs.
// This comes at a performance cost for big-endian CPUs, since some byte swapping is required to emulate the little-endian format.
// Should endian-independence be of no importance to your application, you may set the define below to 1.
// It will improve speed for big-endian CPUs.
// This option has no impact on little-endian CPUs.
#define XXH_FORCE_NATIVE_FORMAT 0

//**************************************
// Compiler Specific Options
//**************************************
// Disable some Visual Studio warning messages
#ifdef _MSC_VER                       // Visual Studio
#  pragma warning(disable : 4127)     // disable: C4127: conditional expression is constant
#endif

#ifdef _MSC_VER                       // Visual Studio
#  define FORCE_INLINE static __forceinline
#else
#  ifdef __GNUC__
#    define FORCE_INLINE static inline __attribute__((always_inline))
#  else
#    define FORCE_INLINE static inline
#  endif
#endif

//**************************************
// Includes & Memory related functions
//**************************************
#include "xxhash.h"
// Modify the local functions below should you wish to use some other memory routines
// for malloc(), free()
#include <stdlib.h>
static void *XXH_malloc(size_t s) { return malloc(s); }
static void  XXH_free(void *p) { free(p); }
// for memcpy()
#include <string.h>
static void *XXH_memcpy(void *dest, const void *src, size_t size)
{
    return memcpy(dest, src, size);
}
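/* Illustrative sketch only (kept disabled) : how the wrappers above could be
 * redirected to custom memory routines. my_alloc() and my_free() are
 * hypothetical names, not part of this library or of any standard API. */
#if 0
extern void *my_alloc(size_t size);   // hypothetical custom allocator
extern void  my_free(void *ptr);      // hypothetical matching release function
static void *XXH_malloc(size_t s) { return my_alloc(s); }
static void  XXH_free(void *p)    { my_free(p); }
#endif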


//**************************************
// Basic Types
//**************************************
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   // C99
#  include <stdint.h>
typedef uint8_t  BYTE;
typedef uint16_t U16;
typedef uint32_t U32;
typedef  int32_t S32;
typedef uint64_t U64;
#else
typedef unsigned char      BYTE;
typedef unsigned short     U16;
typedef unsigned int       U32;
typedef   signed int       S32;
typedef unsigned long long U64;
#endif

#if defined(__GNUC__) && !defined(XXH_USE_UNALIGNED_ACCESS)
#  define _PACKED __attribute__ ((packed))
#else
#  define _PACKED
#endif

#if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
#  ifdef __IBMC__
#    pragma pack(1)
#  else
#    pragma pack(push, 1)
#  endif
#endif

typedef struct _U32_S
{
    U32 v;
} _PACKED U32_S;
typedef struct _U64_S
{
    U64 v;
} _PACKED U64_S;

#if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
#  pragma pack(pop)
#endif

#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)


//***************************************
// Compiler-specific Functions and Macros
//***************************************
#define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)

// Note : although _rotl exists for minGW (GCC under windows), performance seems poor
#if defined(_MSC_VER)
#  define XXH_rotl32(x,r) _rotl(x,r)
#  define XXH_rotl64(x,r) _rotl64(x,r)
#else
#  define XXH_rotl32(x,r) ((x << r) | (x >> (32 - r)))
#  define XXH_rotl64(x,r) ((x << r) | (x >> (64 - r)))
#endif

#if defined(_MSC_VER)     // Visual Studio
#  define XXH_swap32 _byteswap_ulong
#  define XXH_swap64 _byteswap_uint64
#elif GCC_VERSION >= 403
#  define XXH_swap32 __builtin_bswap32
#  define XXH_swap64 __builtin_bswap64
#else
static inline U32 XXH_swap32(U32 x)
{
    return ((x << 24) & 0xff000000) |
           ((x <<  8) & 0x00ff0000) |
           ((x >>  8) & 0x0000ff00) |
           ((x >> 24) & 0x000000ff);
}
static inline U64 XXH_swap64(U64 x)
{
    return ((x << 56) & 0xff00000000000000ULL) |
           ((x << 40) & 0x00ff000000000000ULL) |
           ((x << 24) & 0x0000ff0000000000ULL) |
           ((x <<  8) & 0x000000ff00000000ULL) |
           ((x >>  8) & 0x00000000ff000000ULL) |
           ((x >> 24) & 0x0000000000ff0000ULL) |
           ((x >> 40) & 0x000000000000ff00ULL) |
           ((x >> 56) & 0x00000000000000ffULL);
}
#endif


//**************************************
// Constants
//**************************************
#define PRIME32_1   2654435761U
#define PRIME32_2   2246822519U
#define PRIME32_3   3266489917U
#define PRIME32_4    668265263U
#define PRIME32_5    374761393U

#define PRIME64_1 11400714785074694791ULL
#define PRIME64_2 14029467366897019727ULL
#define PRIME64_3  1609587929392839161ULL
#define PRIME64_4  9650029242287828579ULL
#define PRIME64_5  2870177450012600261ULL

//**************************************
// Architecture Macros
//**************************************
typedef enum { XXH_bigEndian = 0, XXH_littleEndian = 1 } XXH_endianess;
#ifndef XXH_CPU_LITTLE_ENDIAN   // It is possible to define XXH_CPU_LITTLE_ENDIAN externally, for example using a compiler switch
static const int one = 1;
#  define XXH_CPU_LITTLE_ENDIAN   (*(char*)(&one))
#endif


//**************************************
// Macros
//**************************************
#define XXH_STATIC_ASSERT(c)   { enum { XXH_static_assert = 1/(!!(c)) }; }    // use only *after* variable declarations


//****************************
// Memory reads
//****************************
typedef enum { XXH_aligned, XXH_unaligned } XXH_alignment;

FORCE_INLINE U32 XXH_readLE32_align(const void *ptr, XXH_endianess endian,
                                    XXH_alignment align)
{
    if (align == XXH_unaligned)
        return endian == XXH_littleEndian ? A32(ptr) : XXH_swap32(A32(ptr));
    else
        return endian == XXH_littleEndian ? *(U32 *)ptr : XXH_swap32(*(U32 *)ptr);
}

FORCE_INLINE U32 XXH_readLE32(const void *ptr, XXH_endianess endian)
{
    return XXH_readLE32_align(ptr, endian, XXH_unaligned);
}

FORCE_INLINE U64 XXH_readLE64_align(const void *ptr, XXH_endianess endian,
                                    XXH_alignment align)
{
    if (align == XXH_unaligned)
        return endian == XXH_littleEndian ? A64(ptr) : XXH_swap64(A64(ptr));
    else
        return endian == XXH_littleEndian ? *(U64 *)ptr : XXH_swap64(*(U64 *)ptr);
}

FORCE_INLINE U64 XXH_readLE64(const void *ptr, XXH_endianess endian)
{
    return XXH_readLE64_align(ptr, endian, XXH_unaligned);
}


//****************************
// Simple Hash Functions
//****************************
FORCE_INLINE U32 XXH32_endian_align(const void *input, size_t len,
                                    U32 seed, XXH_endianess endian, XXH_alignment align)
{
    const BYTE *p = (const BYTE *)input;
    const BYTE *bEnd = p + len;
    U32 h32;
#define XXH_get32bits(p) XXH_readLE32_align(p, endian, align)

#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    if (p == NULL)
    {
        len = 0;
        bEnd = p = (const BYTE *)(size_t)16;
    }
#endif

    if (len >= 16)
    {
        const BYTE *const limit = bEnd - 16;
        U32 v1 = seed + PRIME32_1 + PRIME32_2;
        U32 v2 = seed + PRIME32_2;
        U32 v3 = seed + 0;
        U32 v4 = seed - PRIME32_1;

        do
        {
            v1 += XXH_get32bits(p) * PRIME32_2;
            v1 = XXH_rotl32(v1, 13);
            v1 *= PRIME32_1;
            p += 4;
            v2 += XXH_get32bits(p) * PRIME32_2;
            v2 = XXH_rotl32(v2, 13);
            v2 *= PRIME32_1;
            p += 4;
            v3 += XXH_get32bits(p) * PRIME32_2;
            v3 = XXH_rotl32(v3, 13);
            v3 *= PRIME32_1;
            p += 4;
            v4 += XXH_get32bits(p) * PRIME32_2;
            v4 = XXH_rotl32(v4, 13);
            v4 *= PRIME32_1;
            p += 4;
        }
        while (p <= limit);

        h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7) + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
    }
    else
        h32 = seed + PRIME32_5;

    h32 += (U32) len;

    while (p + 4 <= bEnd)
    {
        h32 += XXH_get32bits(p) * PRIME32_3;
        h32 = XXH_rotl32(h32, 17) * PRIME32_4;
        p += 4;
    }

    while (p < bEnd)
    {
        h32 += (*p) * PRIME32_5;
        h32 = XXH_rotl32(h32, 11) * PRIME32_1;
        p++;
    }

    h32 ^= h32 >> 15;
    h32 *= PRIME32_2;
    h32 ^= h32 >> 13;
    h32 *= PRIME32_3;
    h32 ^= h32 >> 16;

    return h32;
}


unsigned int XXH32(const void *input, size_t len, unsigned seed)
{
#if 0
    // Simple version, good for code maintenance, but unfortunately slow for small inputs
    XXH32_state_t state;
    XXH32_reset(&state, seed);
    XXH32_update(&state, input, len);
    return XXH32_digest(&state);
#else
    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;

#  if !defined(XXH_USE_UNALIGNED_ACCESS)
    if ((((size_t)input) & 3) == 0)   // Input is aligned, let's leverage the speed advantage
    {
        if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
            return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
        else
            return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
    }
#  endif

    if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
        return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
    else
        return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
#endif
}
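/* Usage sketch (illustrative only, not compiled into the library) : hashing a
 * buffer in one shot with the simple interface above. The buffer contents and
 * the seed value are arbitrary examples. */
#if 0
#include <stdio.h>
static void XXH32_oneshot_example(void)
{
    static const char msg[] = "xxHash sample input";
    unsigned int h = XXH32(msg, sizeof(msg) - 1, 0);   // seed = 0
    printf("XXH32 = 0x%08X\n", h);
}
#endif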

FORCE_INLINE U64 XXH64_endian_align(const void *input, size_t len,
                                    U64 seed, XXH_endianess endian, XXH_alignment align)
{
    const BYTE *p = (const BYTE *)input;
    const BYTE *bEnd = p + len;
    U64 h64;
#define XXH_get64bits(p) XXH_readLE64_align(p, endian, align)

#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    if (p == NULL)
    {
        len = 0;
        bEnd = p = (const BYTE *)(size_t)32;
    }
#endif

    if (len >= 32)
    {
        const BYTE *const limit = bEnd - 32;
        U64 v1 = seed + PRIME64_1 + PRIME64_2;
        U64 v2 = seed + PRIME64_2;
        U64 v3 = seed + 0;
        U64 v4 = seed - PRIME64_1;

        do
        {
            v1 += XXH_get64bits(p) * PRIME64_2;
            p += 8;
            v1 = XXH_rotl64(v1, 31);
            v1 *= PRIME64_1;
            v2 += XXH_get64bits(p) * PRIME64_2;
            p += 8;
            v2 = XXH_rotl64(v2, 31);
            v2 *= PRIME64_1;
            v3 += XXH_get64bits(p) * PRIME64_2;
            p += 8;
            v3 = XXH_rotl64(v3, 31);
            v3 *= PRIME64_1;
            v4 += XXH_get64bits(p) * PRIME64_2;
            p += 8;
            v4 = XXH_rotl64(v4, 31);
            v4 *= PRIME64_1;
        }
        while (p <= limit);

        h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);

        v1 *= PRIME64_2;
        v1 = XXH_rotl64(v1, 31);
        v1 *= PRIME64_1;
        h64 ^= v1;
        h64 = h64 * PRIME64_1 + PRIME64_4;

        v2 *= PRIME64_2;
        v2 = XXH_rotl64(v2, 31);
        v2 *= PRIME64_1;
        h64 ^= v2;
        h64 = h64 * PRIME64_1 + PRIME64_4;

        v3 *= PRIME64_2;
        v3 = XXH_rotl64(v3, 31);
        v3 *= PRIME64_1;
        h64 ^= v3;
        h64 = h64 * PRIME64_1 + PRIME64_4;

        v4 *= PRIME64_2;
        v4 = XXH_rotl64(v4, 31);
        v4 *= PRIME64_1;
        h64 ^= v4;
        h64 = h64 * PRIME64_1 + PRIME64_4;
    }
    else
        h64 = seed + PRIME64_5;

    h64 += (U64) len;

    while (p + 8 <= bEnd)
    {
        U64 k1 = XXH_get64bits(p);
        k1 *= PRIME64_2;
        k1 = XXH_rotl64(k1, 31);
        k1 *= PRIME64_1;
        h64 ^= k1;
        h64 = XXH_rotl64(h64, 27) * PRIME64_1 + PRIME64_4;
        p += 8;
    }

    if (p + 4 <= bEnd)
    {
        h64 ^= (U64)(XXH_get32bits(p)) * PRIME64_1;
        h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
        p += 4;
    }

    while (p < bEnd)
    {
        h64 ^= (*p) * PRIME64_5;
        h64 = XXH_rotl64(h64, 11) * PRIME64_1;
        p++;
    }

    h64 ^= h64 >> 33;
    h64 *= PRIME64_2;
    h64 ^= h64 >> 29;
    h64 *= PRIME64_3;
    h64 ^= h64 >> 32;

    return h64;
}


unsigned long long XXH64(const void *input, size_t len,
                         unsigned long long seed)
{
#if 0
    // Simple version, good for code maintenance, but unfortunately slow for small inputs
    XXH64_state_t state;
    XXH64_reset(&state, seed);
    XXH64_update(&state, input, len);
    return XXH64_digest(&state);
#else
    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;

#  if !defined(XXH_USE_UNALIGNED_ACCESS)
    if ((((size_t)input) & 7) == 0)   // Input is aligned, let's leverage the speed advantage
    {
        if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
            return XXH64_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
        else
            return XXH64_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
    }
#  endif

    if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
        return XXH64_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
    else
        return XXH64_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
#endif
}
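/* Illustrative sketch only (kept disabled) : the seed parameter derives
 * distinct hash values from the same input, e.g. to vary a hash table's
 * layout across runs. The input string and seed values are arbitrary examples. */
#if 0
static void XXH64_seed_example(void)
{
    static const char msg[] = "xxHash sample input";
    unsigned long long h0 = XXH64(msg, sizeof(msg) - 1, 0);            // default seed
    unsigned long long h1 = XXH64(msg, sizeof(msg) - 1, 0x9E3779B1U);  // different seed, different hash
    (void)h0;
    (void)h1;
}
#endif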

/****************************************************
 *  Advanced Hash Functions
****************************************************/

/*** Allocation ***/
typedef struct
{
    U64 total_len;
    U32 seed;
    U32 v1;
    U32 v2;
    U32 v3;
    U32 v4;
    U32 mem32[4];   /* defined as U32 for alignment */
    U32 memsize;
} XXH_istate32_t;

typedef struct
{
    U64 total_len;
    U64 seed;
    U64 v1;
    U64 v2;
    U64 v3;
    U64 v4;
    U64 mem64[4];   /* defined as U64 for alignment */
    U32 memsize;
} XXH_istate64_t;


XXH32_state_t *XXH32_createState(void)
{
    XXH_STATIC_ASSERT(sizeof(XXH32_state_t) >= sizeof(XXH_istate32_t));   // A compilation error here means XXH32_state_t is not large enough
    return (XXH32_state_t *)XXH_malloc(sizeof(XXH32_state_t));
}
XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr)
{
    XXH_free(statePtr);
    return XXH_OK;
};

XXH64_state_t *XXH64_createState(void)
{
    XXH_STATIC_ASSERT(sizeof(XXH64_state_t) >= sizeof(XXH_istate64_t));   // A compilation error here means XXH64_state_t is not large enough
    return (XXH64_state_t *)XXH_malloc(sizeof(XXH64_state_t));
}
XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr)
{
    XXH_free(statePtr);
    return XXH_OK;
};


/*** Hash feed ***/

XXH_errorcode XXH32_reset(XXH32_state_t *state_in, U32 seed)
{
    XXH_istate32_t *state = (XXH_istate32_t *) state_in;
    state->seed = seed;
    state->v1 = seed + PRIME32_1 + PRIME32_2;
    state->v2 = seed + PRIME32_2;
    state->v3 = seed + 0;
    state->v4 = seed - PRIME32_1;
    state->total_len = 0;
    state->memsize = 0;
    return XXH_OK;
}

XXH_errorcode XXH64_reset(XXH64_state_t *state_in, unsigned long long seed)
{
    XXH_istate64_t *state = (XXH_istate64_t *) state_in;
    state->seed = seed;
    state->v1 = seed + PRIME64_1 + PRIME64_2;
    state->v2 = seed + PRIME64_2;
    state->v3 = seed + 0;
    state->v4 = seed - PRIME64_1;
    state->total_len = 0;
    state->memsize = 0;
    return XXH_OK;
}


FORCE_INLINE XXH_errorcode XXH32_update_endian(XXH32_state_t *state_in,
        const void *input, size_t len, XXH_endianess endian)
{
    XXH_istate32_t *state = (XXH_istate32_t *) state_in;
    const BYTE *p = (const BYTE *)input;
    const BYTE *const bEnd = p + len;

#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    if (input == NULL) return XXH_ERROR;
#endif

    state->total_len += len;

    if (state->memsize + len < 16)   // fill in tmp buffer
    {
        XXH_memcpy((BYTE *)(state->mem32) + state->memsize, input, len);
        state->memsize += (U32)len;
        return XXH_OK;
    }

    if (state->memsize)   // some data left from previous update
    {
        XXH_memcpy((BYTE *)(state->mem32) + state->memsize, input,
                   16 - state->memsize);
        {
            const U32 *p32 = state->mem32;
            state->v1 += XXH_readLE32(p32, endian) * PRIME32_2;
            state->v1 = XXH_rotl32(state->v1, 13);
            state->v1 *= PRIME32_1;
            p32++;
            state->v2 += XXH_readLE32(p32, endian) * PRIME32_2;
            state->v2 = XXH_rotl32(state->v2, 13);
            state->v2 *= PRIME32_1;
            p32++;
            state->v3 += XXH_readLE32(p32, endian) * PRIME32_2;
            state->v3 = XXH_rotl32(state->v3, 13);
            state->v3 *= PRIME32_1;
            p32++;
            state->v4 += XXH_readLE32(p32, endian) * PRIME32_2;
            state->v4 = XXH_rotl32(state->v4, 13);
            state->v4 *= PRIME32_1;
            p32++;
        }
        p += 16 - state->memsize;
        state->memsize = 0;
    }

    if (p <= bEnd - 16)
    {
        const BYTE *const limit = bEnd - 16;
        U32 v1 = state->v1;
        U32 v2 = state->v2;
        U32 v3 = state->v3;
        U32 v4 = state->v4;

        do
        {
            v1 += XXH_readLE32(p, endian) * PRIME32_2;
            v1 = XXH_rotl32(v1, 13);
            v1 *= PRIME32_1;
            p += 4;
            v2 += XXH_readLE32(p, endian) * PRIME32_2;
            v2 = XXH_rotl32(v2, 13);
            v2 *= PRIME32_1;
            p += 4;
            v3 += XXH_readLE32(p, endian) * PRIME32_2;
            v3 = XXH_rotl32(v3, 13);
            v3 *= PRIME32_1;
            p += 4;
            v4 += XXH_readLE32(p, endian) * PRIME32_2;
            v4 = XXH_rotl32(v4, 13);
            v4 *= PRIME32_1;
            p += 4;
        }
        while (p <= limit);

        state->v1 = v1;
        state->v2 = v2;
        state->v3 = v3;
        state->v4 = v4;
    }

    if (p < bEnd)
    {
        XXH_memcpy(state->mem32, p, bEnd - p);
        state->memsize = (int)(bEnd - p);
    }

    return XXH_OK;
}

XXH_errorcode XXH32_update(XXH32_state_t *state_in, const void *input,
                           size_t len)
{
    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;

    if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
        return XXH32_update_endian(state_in, input, len, XXH_littleEndian);
    else
        return XXH32_update_endian(state_in, input, len, XXH_bigEndian);
}



FORCE_INLINE U32 XXH32_digest_endian(const XXH32_state_t *state_in,
                                     XXH_endianess endian)
{
    XXH_istate32_t *state = (XXH_istate32_t *) state_in;
    const BYTE *p = (const BYTE *)state->mem32;
    BYTE *bEnd = (BYTE *)(state->mem32) + state->memsize;
    U32 h32;

    if (state->total_len >= 16)
        h32 = XXH_rotl32(state->v1, 1) + XXH_rotl32(state->v2, 7) + XXH_rotl32(state->v3, 12) + XXH_rotl32(state->v4, 18);
    else
        h32 = state->seed + PRIME32_5;

    h32 += (U32) state->total_len;

    while (p + 4 <= bEnd)
    {
        h32 += XXH_readLE32(p, endian) * PRIME32_3;
        h32 = XXH_rotl32(h32, 17) * PRIME32_4;
        p += 4;
    }

    while (p < bEnd)
    {
        h32 += (*p) * PRIME32_5;
        h32 = XXH_rotl32(h32, 11) * PRIME32_1;
        p++;
    }

    h32 ^= h32 >> 15;
    h32 *= PRIME32_2;
    h32 ^= h32 >> 13;
    h32 *= PRIME32_3;
    h32 ^= h32 >> 16;

    return h32;
}


U32 XXH32_digest(const XXH32_state_t *state_in)
{
    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;

    if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
        return XXH32_digest_endian(state_in, XXH_littleEndian);
    else
        return XXH32_digest_endian(state_in, XXH_bigEndian);
}


FORCE_INLINE XXH_errorcode XXH64_update_endian(XXH64_state_t *state_in,
        const void *input, size_t len, XXH_endianess endian)
{
    XXH_istate64_t *state = (XXH_istate64_t *) state_in;
    const BYTE *p = (const BYTE *)input;
    const BYTE *const bEnd = p + len;

#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    if (input == NULL) return XXH_ERROR;
#endif

    state->total_len += len;

    if (state->memsize + len < 32)   // fill in tmp buffer
    {
        XXH_memcpy(((BYTE *)state->mem64) + state->memsize, input, len);
        state->memsize += (U32)len;
        return XXH_OK;
    }

    if (state->memsize)   // some data left from previous update
    {
        XXH_memcpy(((BYTE *)state->mem64) + state->memsize, input,
                   32 - state->memsize);
        {
            const U64 *p64 = state->mem64;
            state->v1 += XXH_readLE64(p64, endian) * PRIME64_2;
            state->v1 = XXH_rotl64(state->v1, 31);
            state->v1 *= PRIME64_1;
            p64++;
            state->v2 += XXH_readLE64(p64, endian) * PRIME64_2;
            state->v2 = XXH_rotl64(state->v2, 31);
            state->v2 *= PRIME64_1;
            p64++;
            state->v3 += XXH_readLE64(p64, endian) * PRIME64_2;
            state->v3 = XXH_rotl64(state->v3, 31);
            state->v3 *= PRIME64_1;
            p64++;
            state->v4 += XXH_readLE64(p64, endian) * PRIME64_2;
            state->v4 = XXH_rotl64(state->v4, 31);
            state->v4 *= PRIME64_1;
            p64++;
        }
        p += 32 - state->memsize;
        state->memsize = 0;
    }

    if (p + 32 <= bEnd)
    {
        const BYTE *const limit = bEnd - 32;
        U64 v1 = state->v1;
        U64 v2 = state->v2;
        U64 v3 = state->v3;
        U64 v4 = state->v4;

        do
        {
            v1 += XXH_readLE64(p, endian) * PRIME64_2;
            v1 = XXH_rotl64(v1, 31);
            v1 *= PRIME64_1;
            p += 8;
            v2 += XXH_readLE64(p, endian) * PRIME64_2;
            v2 = XXH_rotl64(v2, 31);
            v2 *= PRIME64_1;
            p += 8;
            v3 += XXH_readLE64(p, endian) * PRIME64_2;
            v3 = XXH_rotl64(v3, 31);
            v3 *= PRIME64_1;
            p += 8;
            v4 += XXH_readLE64(p, endian) * PRIME64_2;
            v4 = XXH_rotl64(v4, 31);
            v4 *= PRIME64_1;
            p += 8;
        }
        while (p <= limit);

        state->v1 = v1;
        state->v2 = v2;
        state->v3 = v3;
        state->v4 = v4;
    }

    if (p < bEnd)
    {
        XXH_memcpy(state->mem64, p, bEnd - p);
        state->memsize = (int)(bEnd - p);
    }

    return XXH_OK;
}

XXH_errorcode XXH64_update(XXH64_state_t *state_in, const void *input,
                           size_t len)
{
    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;

    if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
        return XXH64_update_endian(state_in, input, len, XXH_littleEndian);
    else
        return XXH64_update_endian(state_in, input, len, XXH_bigEndian);
}



FORCE_INLINE U64 XXH64_digest_endian(const XXH64_state_t *state_in,
                                     XXH_endianess endian)
{
    XXH_istate64_t *state = (XXH_istate64_t *) state_in;
    const BYTE *p = (const BYTE *)state->mem64;
    BYTE *bEnd = (BYTE *)state->mem64 + state->memsize;
    U64 h64;

    if (state->total_len >= 32)
    {
        U64 v1 = state->v1;
        U64 v2 = state->v2;
        U64 v3 = state->v3;
        U64 v4 = state->v4;

        h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);

        v1 *= PRIME64_2;
        v1 = XXH_rotl64(v1, 31);
        v1 *= PRIME64_1;
        h64 ^= v1;
        h64 = h64 * PRIME64_1 + PRIME64_4;

        v2 *= PRIME64_2;
        v2 = XXH_rotl64(v2, 31);
        v2 *= PRIME64_1;
        h64 ^= v2;
        h64 = h64 * PRIME64_1 + PRIME64_4;

        v3 *= PRIME64_2;
        v3 = XXH_rotl64(v3, 31);
        v3 *= PRIME64_1;
        h64 ^= v3;
        h64 = h64 * PRIME64_1 + PRIME64_4;

        v4 *= PRIME64_2;
        v4 = XXH_rotl64(v4, 31);
        v4 *= PRIME64_1;
        h64 ^= v4;
        h64 = h64 * PRIME64_1 + PRIME64_4;
    }
    else
        h64 = state->seed + PRIME64_5;

    h64 += (U64) state->total_len;

    while (p + 8 <= bEnd)
    {
        U64 k1 = XXH_readLE64(p, endian);
        k1 *= PRIME64_2;
        k1 = XXH_rotl64(k1, 31);
        k1 *= PRIME64_1;
        h64 ^= k1;
        h64 = XXH_rotl64(h64, 27) * PRIME64_1 + PRIME64_4;
        p += 8;
    }

    if (p + 4 <= bEnd)
    {
        h64 ^= (U64)(XXH_readLE32(p, endian)) * PRIME64_1;
        h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
        p += 4;
    }

    while (p < bEnd)
    {
        h64 ^= (*p) * PRIME64_5;
        h64 = XXH_rotl64(h64, 11) * PRIME64_1;
        p++;
    }

    h64 ^= h64 >> 33;
    h64 *= PRIME64_2;
    h64 ^= h64 >> 29;
    h64 *= PRIME64_3;
    h64 ^= h64 >> 32;

    return h64;
}


unsigned long long XXH64_digest(const XXH64_state_t *state_in)
{
    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;

    if ((endian_detected == XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
        return XXH64_digest_endian(state_in, XXH_littleEndian);
    else
        return XXH64_digest_endian(state_in, XXH_bigEndian);
}
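/* Usage sketch (illustrative only, not compiled into the library) : the
 * streaming interface above hashes data fed in arbitrary chunks and yields
 * the same value as a single XXH64() call over the whole buffer. The chunk
 * size and seed below are arbitrary examples. */
#if 0
static unsigned long long XXH64_streaming_example(const void *data, size_t len)
{
    const unsigned char *p = (const unsigned char *)data;
    unsigned long long seed = 0;
    unsigned long long h;
    XXH64_state_t *state = XXH64_createState();   // caller should check for NULL in real use
    XXH64_reset(state, seed);
    while (len)
    {
        size_t chunk = len < 4096 ? len : 4096;   // feed at most 4 KB per update
        XXH64_update(state, p, chunk);
        p += chunk;
        len -= chunk;
    }
    h = XXH64_digest(state);
    XXH64_freeState(state);
    return h;
}
#endif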