# ran into memory issues with safetensors. this code works around them.
import json, mmap, os, warnings
import torch
from _bighash import hash
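# _bighash.hash is assumed to take a buffer (here a memoryview of the file
# mapping) and return a hashable digest; it is only used below as a
# dictionary key for deduplication.
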
class WritingSafeTensors:
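    '''Incrementally write tensors into a .safetensors file backed by mmap,
    splitting into numbered shard files once file_size bytes are exceeded,
    and optionally deduplicating tensors with identical content.'''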
    def __init__(self, name, file_size=16*1024*1024*1024, deduplicate=False, save_on_crash=False, **metadata):
        self.name = name.removesuffix('.safetensors')
        self.metadata = metadata
        self.file = self.File(self.name + '.safetensors')
        self.files = {self.file.filename: self.file}
        self.file_size = file_size
        self.weight_map = {}
        if deduplicate:
            warnings.warn('Safetensors deduplication enabled. The file will not be readable with the official library without https://github.com/huggingface/safetensors/pull/586', stacklevel=2)
        self.hash_map = {} if deduplicate else None
        self.save_on_crash = save_on_crash
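
    # add() appends the tensor's bytes to the current file. with
    # deduplication on, a tensor whose hash was already seen is backed out
    # and recorded as an image of the first copy; otherwise, if the current
    # file has grown past file_size, writing rolls over into a new shard.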
    def add(self, name, tensor):
        if self.hash_map is None:
            self.file.add(name, tensor, return_hash=False)
            image_of = name
        else:
            tensor_hash = self.file.add(name, tensor, return_hash=True)
            image_of = self.hash_map.setdefault(tensor_hash, name)
        if image_of is not name:
            # duplicate content: back it out and reference the first copy
            self.file.undo(name, tensor)
            image_file = self.weight_map[image_of]
            image_file.add(name, tensor, return_hash=False, image_of=image_of)
            self.weight_map[name] = image_file
        else:
            print(name, '...')
            if self.file.size >= self.file_size:
                # shard is full: move this tensor into a fresh file
                self.file.undo(name, tensor)
                ct = len(self.files)
                if len(self.files) == 1:
                    # retroactively number the first file now that there are two
                    self.file.rename(f'{self.name}-{ct:05}.safetensors')
                    self.file.set_metadata(index=str(ct))
                    self.files = {self.file.filename: self.file}
                ct += 1
                self.file = self.File(f'{self.name}-{ct:05}.safetensors', index=ct)
                self.files[self.file.filename] = self.file
                self.file.add(name, tensor, return_hash=False)
            self.weight_map[name] = self.file
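
    # finalize() completes a single file in place; with multiple shards it
    # renames them to the -NNNNN-of-NNNNN convention and writes the
    # .safetensors.index.json mapping each tensor name to its shard.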
    def finalize(self):
        if len(self.files) == 1:
            self.file.set_metadata(**self.metadata)
            self.file.finalize()
        else:
            index_name = self.name + '.safetensors.index.json'
            print(index_name, '...')
            total_size = 0
            tot = len(self.files)
            for ct, file in enumerate(self.files.values()):
                ct += 1
                file.rename(f'{self.name}-{ct:05}-of-{tot:05}.safetensors')
                file.finalize()
                total_size += file.size
            with open(index_name, 'w') as fh:
                json.dump(
                    {
                        'metadata': {
                            **{
                                k: v if type(v) in [int, float, str, list, tuple, dict] else str(v)
                                for k, v in self.metadata.items()
                            },
                            'total_size': total_size,
                        },
                        'weight_map': {
                            name: file.filename
                            for name, file in self.weight_map.items()
                        },
                    },
                    fh,
                    indent='\t',
                )
        del self.file
        del self.files
        del self.metadata

    def delete(self):
        for file in self.files.values():
            file.delete()
        del self.file
        del self.files
        del self.metadata

    def __enter__(self):
        return self

    def __exit__(self, Exc, exc, tb):
        if Exc is None or self.save_on_crash:
            try:
                self.finalize()
            except:
                self.delete()
                raise
        else:
            self.delete()

    class File:
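        '''One mmapped .safetensors file. Tensor data is written first,
        growing the mapping as needed; the JSON header is prepended at
        finalize time.'''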
        def __init__(self, filename, **metadata):
            print(filename, '...')
            self.filename = filename
            self.fd = os.open(self.filename, os.O_RDWR | os.O_CREAT)
            self.size = 0
            self.capacity = 0
            self.mmapview = None
            self.header = {'__metadata__': {k: str(v) for k, v in metadata.items()}}
            self.finalized = False
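
        # _reserve() grows the file geometrically (at least doubling, rounded
        # up to whole pages) and remaps it, so the number of remappings grows
        # only logarithmically with the file size.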
        def _reserve(self, length):
            if self.size + length > self.capacity:
                new_capacity = self.size * 2
                if new_capacity < self.size + length:
                    new_capacity = (((self.size + length)*2 - 1) // mmap.PAGESIZE + 1) * mmap.PAGESIZE
                os.truncate(self.filename, new_capacity)
                self.mmapview = memoryview(mmap.mmap(self.fd, new_capacity))
                self.capacity = new_capacity

        def add(self, name, tensor, return_hash, image_of=None):
            length = tensor.numel() * tensor.dtype.itemsize
            if image_of is None:
                self._reserve(length)
                start, end = self.size, self.size + length
                # write the tensor straight into the file mapping
                torch.frombuffer(
                    self.mmapview[start : end],
                    dtype=tensor.dtype, count=tensor.numel(),
                ).view(tensor.shape or [1])[:] = tensor
                assert end >= self.size
                self.size = end
            else:
                # reuse the bytes of an identical tensor already in this file
                image = self.header[image_of]
                start, end = image['data_offsets']
                assert end - start == length
                assert (tensor == torch.frombuffer(
                    self.mmapview[start : end],
                    dtype=tensor.dtype, count=tensor.numel(),
                ).view(tensor.shape)).all()
            if return_hash:
                tensor_hash = hash(self.mmapview[start : end])
            else:
                tensor_hash = None
            self.header[name] = {
                'dtype':
                    # e.g. torch.bfloat16 -> 'BF16', torch.float8_e4m3fn -> 'F8_E4M3'
                    str(tensor.dtype).rsplit('.', 1)[-1]
                    .replace('float', 'F')
                    .replace('uint', 'U')
                    .replace('int', 'I')
                    .removesuffix('uz')
                    .removesuffix('fn')
                    .upper(),
                'shape':
                    list(tensor.shape),
                'data_offsets':
                    [start, end],
            }
            return tensor_hash

        def undo(self, name, tensor):
            # only the most recently added tensor can be backed out
            length = tensor.numel() * tensor.dtype.itemsize
            assert [self.size - length, self.size] == self.header[name]['data_offsets']
            self.size -= length
            del self.header[name]

        def set_metadata(self, **metadata):
            m = self.header['__metadata__']
            for k, v in metadata.items():
                m[k] = str(v)

        def rename(self, filename):
            os.rename(self.filename, filename)
            self.filename = filename
            return filename
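
        # the safetensors layout is an 8-byte little-endian header length,
        # then the JSON header, then raw tensor data. finalize() shifts the
        # data already written rightward to make room, then fills the header
        # in at the front.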
        def finalize(self):
            print(self.filename, '...')
            header = json.dumps(self.header, separators=[',', ':']).encode()
            insert = len(header) + 8
            self._reserve(insert)
            # overlapping slice copy: CPython copies overlapping contiguous
            # memoryviews with memmove semantics, so this shift is safe
            self.mmapview[insert : insert + self.size] = self.mmapview[:self.size]
            self.size += insert
            self.mmapview[:8] = len(header).to_bytes(8, 'little')
            self.mmapview[8:insert] = header
            del self.header
            del self.mmapview
            os.close(self.fd)
            os.truncate(self.filename, self.size)
            self.finalized = True

        def delete(self):
            if not self.finalized:
                print('deleting', self.filename, '...')
                del self.header
                del self.mmapview
                os.close(self.fd)
                self.finalized = True
                os.unlink(self.filename)
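
# a minimal usage sketch (the tensor names here are hypothetical):
#
#     with WritingSafeTensors('model', deduplicate=True) as out:
#         out.add('embed.weight', torch.zeros(4, 4))
#         out.add('lm_head.weight', torch.zeros(4, 4))  # backed out, stored as an image of embed.weight
#
# a clean exit finalizes model.safetensors (plus a .safetensors.index.json if
# the 16 GiB default shard size was exceeded); an unhandled exception deletes
# the partial files unless save_on_crash=True.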