10 v0 += v1; v1 = std::rotl(v1, 13); v1 ^= v0; \
11 v0 = std::rotl(v0, 32); \
12 v2 += v3; v3 = std::rotl(v3, 16); v3 ^= v2; \
13 v0 += v3; v3 = std::rotl(v3, 21); v3 ^= v0; \
14 v2 += v1; v1 = std::rotl(v1, 17); v1 ^= v2; \
15 v2 = std::rotl(v2, 32); \
20 v[0] = 0x736f6d6570736575ULL ^ k0;
21 v[1] = 0x646f72616e646f6dULL ^ k1;
22 v[2] = 0x6c7967656e657261ULL ^ k0;
23 v[3] = 0x7465646279746573ULL ^ k1;
30 uint64_t v0 =
v[0], v1 =
v[1], v2 =
v[2], v3 =
v[3];
50 uint64_t v0 =
v[0], v1 =
v[1], v2 =
v[2], v3 =
v[3];
54 while (
data.size() > 0) {
55 t |= uint64_t{
data.front()} << (8 * (c % 8));
79 uint64_t v0 =
v[0], v1 =
v[1], v2 =
v[2], v3 =
v[3];
81 uint64_t
t =
tmp | (((uint64_t)
count) << 56);
92 return v0 ^ v1 ^ v2 ^ v3;
100 uint64_t v0 = 0x736f6d6570736575ULL ^ k0;
101 uint64_t v1 = 0x646f72616e646f6dULL ^ k1;
102 uint64_t v2 = 0x6c7967656e657261ULL ^ k0;
103 uint64_t v3 = 0x7465646279746573ULL ^ k1 ^ d;
123 v3 ^= (uint64_t{4}) << 59;
126 v0 ^= (uint64_t{4}) << 59;
132 return v0 ^ v1 ^ v2 ^ v3;
140 uint64_t v0 = 0x736f6d6570736575ULL ^ k0;
141 uint64_t v1 = 0x646f72616e646f6dULL ^ k1;
142 uint64_t v2 = 0x6c7967656e657261ULL ^ k0;
143 uint64_t v3 = 0x7465646279746573ULL ^ k1 ^ d;
163 d = ((uint64_t{36}) << 56) | extra;
173 return v0 ^ v1 ^ v2 ^ v3;
uint64_t Finalize() const
Compute the 64-bit SipHash-2-4 of the data written so far.
CSipHasher(uint64_t k0, uint64_t k1)
Construct a SipHash calculator initialized with a 128-bit key (k0, k1).
CSipHasher & Write(uint64_t data)
Hash a 64-bit integer worth of data. It is treated as if this was the little-endian interpretation of ...
constexpr uint64_t GetUint64(int pos) const
uint64_t SipHashUint256Extra(uint64_t k0, uint64_t k1, const uint256 &val, uint32_t extra)
uint64_t SipHashUint256(uint64_t k0, uint64_t k1, const uint256 &val)
Optimized SipHash-2-4 implementation for uint256.