File size: 3,276 Bytes
5d3e4b5 80ea4ee 5d3e4b5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 |
import torch
# xxhash prime constants from CPython's tuple hash, reinterpreted as
# two's-complement int64 so they can be used with torch int64 tensors
_UINT64_MODULUS = 2 ** 64
XXPRIME_1 = 11400714785074694791 - _UINT64_MODULUS
XXPRIME_2 = 14029467366897019727 - _UINT64_MODULUS
XXPRIME_5 = 2870177450012600261  # already fits in a signed int64
# constant CPython folds into the length term of its tuple hash
XXPRIME_5_3527539 = XXPRIME_5 ^ 3527539
def tensor_python_tuple_hash(items, out_or_in_place):
    """Vectorized analogue of CPython's tuple hash over int64 tensors.

    Mirrors the accumulator loop of
    https://github.com/python/cpython/blob/v3.13.2/Objects/tupleobject.c#L321
    (apparently a simplified & modified form of xxhash; learning xxhash
    could likely improve the code in this file), applied element-wise:
    each items[i] is folded into a running accumulator tensor.

    items: indexable sequence of same-shaped int64 tensors (e.g. the two
        rows of a 2-D tensor); here len(items) plays the tuple-length role.
    out_or_in_place: tensor the accumulator is written into, or None to
        accumulate in place inside items[0] (clobbering items[0]).
    Returns the accumulator tensor (an alias of items[0] or out_or_in_place).

    NOTE(review): relies on torch int64 arithmetic wrapping modulo 2**64
    on overflow (two's complement) for the multiplies below.
    """
    len_ = len(items)
    # first iteration pulled out to provide storage placement
    # (acc = XXPRIME_5 + items[0] * XXPRIME_2, written into the chosen output)
    if out_or_in_place is None:
        # in place: items[0] doubles as the accumulator
        acc = torch.add(XXPRIME_5, items[0], alpha=XXPRIME_2, out=items[0])
    else:
        # place in out_or_in_place
        acc = torch.add(XXPRIME_5, items[0], alpha=XXPRIME_2, out=out_or_in_place)
    # bitwise rotation left by 31: upshift keeps the bits that wrap around
    # to the top; >>33 then &0x7fffffff emulates a *logical* right shift by
    # clearing the bits an arithmetic shift sign-extends
    upshift = acc << 31
    acc >>= 33
    acc &= 0x7fffffff # mask int64 sign extension
    acc |= upshift
    acc *= XXPRIME_1
    for i in range(1, len_):
        # acc += x * prime2
        acc.add_(items[i], alpha=XXPRIME_2)
        # bitwise rotation (same rotate-left-31 as above)
        upshift = acc << 31
        acc >>= 33
        acc &= 0x7fffffff # mask int64 sign extension
        acc |= upshift
        acc *= XXPRIME_1
    # CPython's final length mix: acc += len ^ (XXPRIME_5 ^ 3527539)
    acc += (len_ ^ XXPRIME_5_3527539)
    return acc
def hash(buffer, *incoming_unhashed_ints):
    """Hash a bytes-like buffer plus optional extra ints into a python int.

    Buffers shorter than 16 bytes fall back to python's built-in hashing;
    longer buffers are reinterpreted as int64 words and reduced pairwise
    with tensor_python_tuple_hash until a single word remains.

    buffer: bytes-like object; the tensor path requires a writable buffer
        (e.g. bytearray) for torch.frombuffer.
    incoming_unhashed_ints: extra ints folded into the hash; on the tensor
        path each must fit in a signed int64.
    Returns a python int.
    """
    if len(buffer) < 16:
        # Bug fix: the extra ints were previously ignored on this path;
        # fold them in via python's tuple hash when present.
        if incoming_unhashed_ints:
            return (bytes(buffer), *incoming_unhashed_ints).__hash__()
        return bytes(buffer).__hash__()
    # first pass
    # - allocate storage
    # - place unhashed ints
    words = len(buffer) // 8
    dwords = words // 2
    incoming_data = torch.frombuffer(buffer, count=words, dtype=torch.int64)
    # trailing bytes that do not fill a whole word join the unhashed ints
    incoming_unhashed_ints = [int.from_bytes(buffer[words*8:], 'little')] + list(incoming_unhashed_ints)
    incoming_hashable_length = words & ~1  # even count of words, hashed pairwise
    incoming_unhashed_length = words & 1   # 1 if one word is left over, else 0
    incoming_unhashed_int_length = len(incoming_unhashed_ints)
    incoming_hashable_data = incoming_data[:incoming_hashable_length].view(2, -1)
    incoming_unhashed_data = incoming_data[incoming_hashable_length:]
    # storage layout: [dwords hashed words][leftover word][extra ints]
    storage = torch.empty([dwords + incoming_unhashed_length + incoming_unhashed_int_length], dtype=torch.int64)
    tensor_python_tuple_hash(incoming_hashable_data, out_or_in_place=storage[:dwords])
    storage[dwords:dwords + incoming_unhashed_length] = incoming_unhashed_data
    # Bug fix: offset by incoming_unhashed_length so the ints land after the
    # leftover word; previously they started at storage[dwords], clobbering
    # that word and leaving the final torch.empty slot uninitialized, which
    # made the hash nondeterministic for odd word counts.
    for idx in range(incoming_unhashed_int_length):
        storage[dwords + incoming_unhashed_length + idx] = incoming_unhashed_ints[idx]
    incoming_data = storage
    words = len(incoming_data)
    dwords = words // 2
    # iterative passes: roughly halve the word count until one word remains
    while words > 1:
        incoming_hashable_length = words & ~1
        incoming_unhashed_length = words & 1
        incoming_hashable_data = incoming_data[:incoming_hashable_length].view(2, -1)
        incoming_unhashed_tensor_data = incoming_data[incoming_hashable_length:]
        # hashes in place into the first dwords elements of incoming_data
        tensor_python_tuple_hash(incoming_hashable_data, out_or_in_place=None)
        words = dwords + incoming_unhashed_length
        # slide any leftover word up against the freshly hashed prefix
        storage[dwords:words] = incoming_unhashed_tensor_data
        incoming_data = storage[:words]
        dwords = words // 2
    return incoming_data[0].item()
if __name__ == '__main__':
    # demo: hash a buffer long enough to exercise the tensor path
    demo_buffer = bytearray(b'the quick brown fox jumped over the lazy dog')
    print(hash(demo_buffer))
|