indexed deduplicated safetensors
Files changed:
- _safetensors.py +5 -3
- scripts/compare_safetensors.py +23 -1
_safetensors.py CHANGED
@@ -25,7 +25,9 @@ class WritingSafeTensors:
         image_of = self.hash_map.setdefault(tensor_hash, name)
         if image_of is not name:
             self.file.undo(name, tensor)
-            self.weight_map[image_of]
+            image_file = self.weight_map[image_of]
+            image_file.add(name, tensor, return_hash=False, image_of=image_of)
+            self.weight_map[name] = image_file
         else:
             print(name, '...')
             if self.file.size >= self.file_size:
@@ -38,7 +40,7 @@
                 ct += 1
                 self.file = self.File(f'{self.name}-{ct:05}.safetensors', index = ct)
                 self.files[self.file.filename] = self.file
-            self.file.add(name, tensor)
+            self.file.add(name, tensor, return_hash=False)
             self.weight_map[name] = self.file
     def finalize(self):
         if len(self.files) == 1:
@@ -62,7 +64,7 @@
                 k: v if type(v) in [int, float, str, list, tuple, dict] else str(v)
                 for k, v in self.metadata.items()
             },
-            'total_size':
+            'total_size': total_size,
         },
         'weight_map': {
             name: file.filename
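The first hunk is the deduplication path: when a tensor's content hash has already been seen, the write to the current shard is undone, the duplicate name is registered against the file that holds the identical payload (image_of), and weight_map points the duplicate name at that file so the index resolves both names. Below is a minimal single-shard sketch of the same content-hash idea; make_index, the shard name, and the demo tensors are hypothetical, and unlike the real WritingSafeTensors the sketch does not record the duplicate name inside the shard header.

import hashlib, json
import safetensors.torch, torch

def make_index(tensors, shard_name='model-00001.safetensors'):
    # hypothetical one-shard sketch of content-hash deduplication
    hash_map = {}    # content hash -> first tensor name seen with that content
    stored = {}      # tensors actually written to the shard
    weight_map = {}  # every tensor name -> shard filename, duplicates included
    for name, tensor in tensors.items():
        tensor_hash = hashlib.sha256(tensor.contiguous().numpy().tobytes()).hexdigest()
        image_of = hash_map.setdefault(tensor_hash, name)
        if image_of != name:
            # duplicate payload: do not store it again, just map the name to the
            # shard that already holds the identical bytes
            weight_map[name] = weight_map[image_of]
        else:
            stored[name] = tensor
            weight_map[name] = shard_name
    safetensors.torch.save_file(stored, shard_name)
    total_size = sum(t.numel() * t.element_size() for t in stored.values())
    return {'metadata': {'total_size': total_size}, 'weight_map': weight_map}

if __name__ == '__main__':
    w = torch.ones(4)
    # both names resolve to the same shard; only one payload is written
    print(json.dumps(make_index({'a.weight': w, 'b.weight': w.clone()}), indent=2))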
scripts/compare_safetensors.py CHANGED
@@ -1,10 +1,32 @@
 #!/usr/bin/env python3
 import safetensors.torch, torch # any tensor library would work
 
+def safe_open(filename, framework):
+    if filename.endswith('.json'):
+        class IndexFile:
+            def __init__(self, filename, framework):
+                import json
+                with open(filename) as fh:
+                    index = json.load(fh)
+                files = {
+                    file: safetensors.safe_open(file, framework=framework)
+                    for file in index['weight_map'].values()
+                }
+                self.weight_map = {k:files[v] for k,v in index['weight_map'].items()}
+            def get_tensor(self, name):
+                return self.weight_map[name].get_tensor(name)
+            def get_slice(self, name):
+                return self.weight_map[name].get_slice(name)
+            def keys(self):
+                return self.weight_map.keys()
+        return IndexFile(filename, framework=framework)
+    else:
+        return safetensors.safe_open(filename, framework=framework)
+
 def compare(*fns):
     global files, mismatching_keys, avgs, dists, errs
 
-    files = [
+    files = [safe_open(files, framework='pt') for files in fns]
 
     assert set(files[0].keys()) == set(files[1].keys())
 
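With the safe_open wrapper, compare treats an index JSON for a sharded, deduplicated checkpoint the same way as a single .safetensors file: each tensor name is resolved through weight_map to the shard that actually stores it. A hypothetical usage sketch, assuming the script is importable from the scripts directory and the checkpoint paths exist locally (both file names below are placeholders):

import sys
sys.path.insert(0, 'scripts')  # assumption: run from the repository root
from compare_safetensors import safe_open, compare

# open via the index: each name resolves through weight_map to its shard
indexed = safe_open('model.safetensors.index.json', framework='pt')
print(len(list(indexed.keys())), 'tensor names reachable through the index')

# same entry point the script uses internally for both arguments
compare('model.safetensors.index.json', 'model.safetensors')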