// -- Portability shims for the MurmurHash3 x64_128 core --------------------
// NOTE(review): extraction fragment -- the original file's own line numbers
// (39, 40, ...) are fused into the text, and the #else / #endif of this
// conditional (original lines ~44-45, 53) fall outside the visible span.
39#if defined( _MSC_VER )
// MSVC branch: use the compiler keyword / rotate intrinsic directly.
40 #define FORCE_INLINE __forceinline
42 #define ROTL64( x, y ) _rotl64( x, y )
// MSVC needs no literal suffix for 64-bit constants.
43 #define BIG_CONSTANT( x ) ( x )
// GCC/Clang branch (its enclosing #else is not visible here).
46 #define FORCE_INLINE inline __attribute__( ( always_inline ) )
// Body line of mmh3_rotl64 (signature listed at the end of this fragment):
// standard 64-bit left-rotate. Assumes 0 < r < 64 -- r == 0 would shift by
// 64 (undefined behaviour); MurmurHash3 only calls it with r = 31 or 33.
49 return ( x << r ) | ( x >> ( 64 - r ) );
51 #define ROTL64( x, y ) mmh3_rotl64( x, y )
// ##LLU suffix keeps the 64-bit constants unsigned long long on all targets.
52 #define BIG_CONSTANT( x ) ( x##LLU )
// -- addData( data, length ): stream raw bytes into the hash ---------------
// NOTE(review): original lines 76, 78 and 80-87 are not visible; the while
// body presumably also calls hashBlock() and advances data / remaining by
// 16 per iteration -- verify against the full file.
75 size_t remaining = length;
// Consume full 16-byte blocks first, staging each into `blocks`.
77 while( remaining >= 16 )
79 memcpy(
blocks, data, 16 );
// Partial trailing chunk: copy what is left into the staging buffer ...
88 memcpy(
blocks, data, remaining );
// ... then zero-pad up to a 4-byte boundary.
// NOTE(review): 4 - ( remaining + 4 ) % 4 simplifies to 4 - remaining % 4,
// which is 4 (never 0) when remaining is already a multiple of 4, so an
// aligned tail gets four extra zero bytes counted into len -- confirm this
// is intentional and matched by the digest path.
89 size_t padding = 4 - ( remaining + 4 ) % 4;
90 memset(
reinterpret_cast<uint8_t*
>(
blocks ) + remaining, 0, padding );
// Track total bytes staged, padding included.
91 len += remaining + padding;
// -- add() overload bodies: both just forward the raw bytes to addData() ---
// .length() pins this one to the add( const std::string& ) overload.
97 addData(
reinterpret_cast<const uint8_t*
>( input.data() ), input.length() );
// Presumably the add( const std::vector< char >& ) overload (signature is
// listed at the end of this fragment) -- .size()/.data() fit both, verify.
102 addData(
reinterpret_cast<const uint8_t*
>( input.data() ), input.size() );
// -- hashBlock(): mix one staged 128-bit block into (h1, h2) ---------------
// Canonical MurmurHash3_x64_128 block constants (match the reference
// implementation in SMHasher's MurmurHash3.cpp).
135 static const uint64_t c1 =
BIG_CONSTANT( 0x87c37b91114253d5 );
136 static const uint64_t c2 =
BIG_CONSTANT( 0x4cf5ad432745937f );
// First lane: k1 *= c1, rotate 31, *= c2, fold into h1. The surrounding
// lines that load k1/k2 from `blocks` and finish mixing h1/h2 (rotates 27/31
// and h = h*5 + constant in the reference) are outside this view.
141 k1 *= c1; k1 =
ROTL64(k1,31); k1 *= c2;
h1 ^= k1;
// Second lane: same scheme with rotate 33 and the constants swapped.
145 k2 *= c2; k2 =
ROTL64(k2,33); k2 *= c1;
h2 ^= k2;
// -- hashTail(): fold the final 1-15 staged bytes into the state -----------
// View the staging block buffer as raw bytes.
// NOTE(review): C-style cast -- file convention elsewhere (addData) is
// reinterpret_cast; consider aligning when the full file is in view.
152 const uint8_t * tail = (
const uint8_t*)(
blocks);
// Same constants as hashBlock(); the original duplicates them locally.
154 static const uint64_t c1 =
BIG_CONSTANT( 0x87c37b91114253d5 );
155 static const uint64_t c2 =
BIG_CONSTANT( 0x4cf5ad432745937f );
// Canonical MurmurHash3 tail switch: bytes 8..14 accumulate into k2, bytes
// 0..7 into k1; every case deliberately falls through so the lower bytes of
// the lane are OR-ed in. The switch head (presumably switch( len & 15 ),
// original lines 156-161) is outside this view.
162 case 15: k2 ^= ((uint64_t)tail[14]) << 48; [[fallthrough]];
163 case 14: k2 ^= ((uint64_t)tail[13]) << 40; [[fallthrough]];
164 case 13: k2 ^= ((uint64_t)tail[12]) << 32; [[fallthrough]];
165 case 12: k2 ^= ((uint64_t)tail[11]) << 24; [[fallthrough]];
166 case 11: k2 ^= ((uint64_t)tail[10]) << 16; [[fallthrough]];
167 case 10: k2 ^= ((uint64_t)tail[ 9]) << 8; [[fallthrough]];
168 case 9: k2 ^= ((uint64_t)tail[ 8]) << 0;
// High lane complete: apply the k2 mix, then fall into the k1 cases.
169 k2 *= c2; k2 =
ROTL64(k2,33); k2 *= c1;
h2 ^= k2;
// NOTE(review): original lines 170-171 (between h2 ^= k2 and case 8) are
// not visible; the intended fall-through into case 8 presumably carries a
// [[fallthrough]] there as well -- verify, or compilers will warn.
172 case 8: k1 ^= ((uint64_t)tail[ 7]) << 56; [[fallthrough]];
173 case 7: k1 ^= ((uint64_t)tail[ 6]) << 48; [[fallthrough]];
174 case 6: k1 ^= ((uint64_t)tail[ 5]) << 40; [[fallthrough]];
175 case 5: k1 ^= ((uint64_t)tail[ 4]) << 32; [[fallthrough]];
176 case 4: k1 ^= ((uint64_t)tail[ 3]) << 24; [[fallthrough]];
177 case 3: k1 ^= ((uint64_t)tail[ 2]) << 16; [[fallthrough]];
178 case 2: k1 ^= ((uint64_t)tail[ 1]) << 8; [[fallthrough]];
179 case 1: k1 ^= ((uint64_t)tail[ 0]) << 0;
// Low lane complete: apply the k1 mix.
180 k1 *= c1; k1 =
ROTL64(k1,31); k1 *= c2;
h1 ^= k1;
A streaming C++ equivalent of MurmurHash3_x64_128.
FORCE_INLINE void hashBlock()
FORCE_INLINE void add(int32_t input)
FORCE_INLINE void add(const std::vector< char > &input)
FORCE_INLINE void addData(const uint8_t *data, size_t length)
FORCE_INLINE void add(const std::string &input)
FORCE_INLINE HASH_128 digest()
MMH3_HASH(uint32_t aSeed)
static FORCE_INLINE uint64_t fmix64(uint64_t k)
FORCE_INLINE void reset(uint32_t aSeed=0)
FORCE_INLINE uint64_t getblock64(int i)
FORCE_INLINE void hashTail()
FORCE_INLINE void hashFinal(HASH_128 &out)
uint64_t mmh3_rotl64(uint64_t x, int8_t r)
A storage class for a 128-bit hash value.