"""Incremental safetensors writer.

Streams tensors into one or more .safetensors files through a growing mmap,
rolling over to a new shard when a file exceeds a size limit and optionally
deduplicating identical tensors by content hash. A .safetensors file is an
8-byte little-endian header length, a JSON header, then raw tensor data.
"""

import json, mmap, os, warnings

import torch

# _bighash is a local module; its hash() (shadowing the builtin) returns a
# content hash of a byte buffer, used here to detect duplicate tensors.
from _bighash import hash
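
# A minimal stand-in, assuming only that hash(buffer) returns a hashable,
# collision-resistant key (a sketch, not the original _bighash module):
#
#     import hashlib
#     def hash(buffer):
#         return hashlib.sha256(buffer).digest()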


class WritingSafeTensors:
    """Context manager that writes named tensors to .safetensors file(s)."""

    def __init__(self, name, file_size=16*1024*1024*1024, deduplicate=False, save_on_crash=False, **metadata):
        self.name = name.removesuffix('.safetensors')
        self.metadata = metadata
        self.file = self.File(self.name + '.safetensors')
        self.files = {self.file.filename: self.file}
        self.file_size = file_size  # per-shard rollover threshold, in bytes
        self.weight_map = {}  # tensor name -> File holding its data
        if deduplicate:
            warnings.warn('Safetensors deduplication enabled. The file will not be readable with the official library without https://github.com/huggingface/safetensors/pull/586', stacklevel=2)
        self.hash_map = {} if deduplicate else None  # content hash -> first name seen
        self.save_on_crash = save_on_crash

    def add(self, name, tensor):
        if self.hash_map is None:
            self.file.add(name, tensor, return_hash=False)
            image_of = name
        else:
            tensor_hash = self.file.add(name, tensor, return_hash=True)
            # setdefault returns the first name recorded for this hash, so a
            # result other than name means the tensor is a duplicate.
            image_of = self.hash_map.setdefault(tensor_hash, name)
        if image_of != name:
            # Duplicate: drop the bytes just written and alias this entry to
            # the file that already holds identical data.
            self.file.undo(name, tensor)
            image_file = self.weight_map[image_of]
            image_file.add(name, tensor, return_hash=False, image_of=image_of)
            self.weight_map[name] = image_file
        else:
            print(name, '...')
            if self.file.size >= self.file_size:
                # Current shard is over the limit: roll the tensor back and
                # rewrite it into a fresh shard.
                self.file.undo(name, tensor)
                ct = len(self.files)
                if ct == 1:
                    # First rollover: give the lone file a shard suffix.
                    self.file.rename(f'{self.name}-{ct:05}.safetensors')
                    self.file.set_metadata(index=str(ct))
                    self.files = {self.file.filename: self.file}
                ct += 1
                self.file = self.File(f'{self.name}-{ct:05}.safetensors', index=ct)
                self.files[self.file.filename] = self.file
                self.file.add(name, tensor, return_hash=False)
            self.weight_map[name] = self.file

    def finalize(self):
        if len(self.files) == 1:
            self.file.set_metadata(**self.metadata)
            self.file.finalize()
        else:
            index_name = self.name + '.safetensors.index.json'
            print(index_name, '...')
            total_size = 0
            tot = len(self.files)
            for ct, file in enumerate(self.files.values(), start=1):
                file.rename(f'{self.name}-{ct:05}-of-{tot:05}.safetensors')
                file.finalize()
                total_size += file.size
            with open(index_name, 'w') as fh:
                json.dump(
                    {
                        'metadata': {
                            **{
                                k: v if type(v) in [int, float, str, list, tuple, dict] else str(v)
                                for k, v in self.metadata.items()
                            },
                            'total_size': total_size,
                        },
                        'weight_map': {
                            name: file.filename
                            for name, file in self.weight_map.items()
                        },
                    },
                    fh,
                    indent='\t',
                )
        del self.file
        del self.files
        del self.metadata

    def delete(self):
        for file in self.files.values():
            file.delete()
        del self.file
        del self.files
        del self.metadata

    def __enter__(self):
        return self

    def __exit__(self, Exc, exc, tb):
        if Exc is None or self.save_on_crash:
            try:
                self.finalize()
            except BaseException:
                # A failed finalize leaves partial files; remove them before
                # letting the exception propagate.
                self.delete()
                raise
        else:
            self.delete()

    class File:
        """One mmap-backed .safetensors file; data first, header prepended at finalize."""

        def __init__(self, filename, **metadata):
            print(filename, '...')
            self.filename = filename
            self.fd = os.open(self.filename, os.O_RDWR | os.O_CREAT)
            self.size = 0      # bytes of tensor data written so far
            self.capacity = 0  # bytes allocated in the backing file
            self.mmapview = None
            self.header = {'__metadata__': {k: str(v) for k, v in metadata.items()}}
            self.finalized = False

        def _reserve(self, length):
            # Grow the backing file and remap it when the next write would
            # overflow the current capacity.
            if self.size + length > self.capacity:
                new_capacity = self.size * 2
                if new_capacity < self.size + length:
                    # Round up to a page multiple, with headroom for growth.
                    new_capacity = (((self.size + length)*2 - 1) // mmap.PAGESIZE + 1) * mmap.PAGESIZE
                os.truncate(self.filename, new_capacity)
                self.mmapview = memoryview(mmap.mmap(self.fd, new_capacity))
                self.capacity = new_capacity

        def add(self, name, tensor, return_hash, image_of=None):
            length = tensor.numel() * tensor.dtype.itemsize
            if image_of is None:
                # Append the tensor's bytes at the end of the data region.
                self._reserve(length)
                start, end = self.size, self.size + length
                torch.frombuffer(
                    self.mmapview[start : end],
                    dtype=tensor.dtype, count=tensor.numel(),
                ).view(tensor.shape or [1])[:] = tensor

                self.size = end
            else:
                # Deduplicated entry: reuse the byte range of an identical,
                # previously written tensor.
                image = self.header[image_of]
                start, end = image['data_offsets']
                assert end - start == length
                assert (tensor == torch.frombuffer(
                    self.mmapview[start : end],
                    dtype=tensor.dtype, count=tensor.numel(),
                ).view(tensor.shape)).all()

            if return_hash:
                tensor_hash = hash(self.mmapview[start : end])
            else:
                tensor_hash = None

            self.header[name] = {
                # Map the torch dtype name to its safetensors code, e.g.
                # float16 -> F16, bfloat16 -> BF16, float8_e4m3fn -> F8_E4M3.
                'dtype':
                    str(tensor.dtype).rsplit('.', 1)[-1]
                    .replace('float', 'F')
                    .replace('uint', 'U')
                    .replace('int', 'I')
                    .removesuffix('uz')
                    .removesuffix('fn')
                    .upper(),
                'shape':
                    list(tensor.shape),
                'data_offsets':
                    [start, end],
            }
            return tensor_hash

        def undo(self, name, tensor):
            # Roll back the most recent add(); only the tensor at the tail
            # of the data region can be undone.
            length = tensor.numel() * tensor.dtype.itemsize
            assert [self.size - length, self.size] == self.header[name]['data_offsets']
            self.size -= length
            del self.header[name]

        def set_metadata(self, **metadata):
            m = self.header['__metadata__']
            for k, v in metadata.items():
                m[k] = str(v)

        def rename(self, filename):
            os.rename(self.filename, filename)
            self.filename = filename
            return filename

        def finalize(self):
            print(self.filename, '...')
            header = json.dumps(self.header, separators=[',', ':']).encode()
            insert = len(header) + 8
            self._reserve(insert)
            # Shift the data region to make room for the header. The source
            # and destination ranges overlap, so use mmap.move (memmove
            # semantics) on the mapping underlying the view rather than a
            # slice-assignment copy.
            self.mmapview.obj.move(insert, 0, self.size)
            self.size += insert
            # Safetensors layout: 8-byte little-endian header length, then
            # the JSON header, then the raw tensor data.
            self.mmapview[:8] = len(header).to_bytes(8, 'little')
            self.mmapview[8:insert] = header
            del self.header
            del self.mmapview
            os.close(self.fd)
            os.truncate(self.filename, self.size)
            self.finalized = True

        def delete(self):
            # Remove an unfinished file; finalized files are left in place.
            if not self.finalized:
                print('deleting', self.filename, '...')
                del self.header
                del self.mmapview
                os.close(self.fd)
                self.finalized = True
                os.unlink(self.filename)
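
# A hedged usage sketch (illustrative, not part of the original module): the
# output name 'model', the metadata, and the tiny file_size that forces a
# rollover are all assumptions made up for this demo.
if __name__ == '__main__':
    with WritingSafeTensors('model', file_size=256, author='example') as out:
        out.add('layer.0.weight', torch.zeros(4, 4))  # 64 bytes, fits
        out.add('layer.1.weight', torch.ones(8, 8))   # 256 bytes, rolls over
    # On exit this finalizes model-00001-of-00002.safetensors,
    # model-00002-of-00002.safetensors, and model.safetensors.index.json.
    # The shards can then be read back with the official library, e.g.:
    #   from safetensors.torch import load_file
    #   tensors = load_file('model-00001-of-00002.safetensors')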