user committed on
Commit · 62b9ba4 · 1 Parent(s): 8d9f1cb
deepseek-ai_DeepSeek-V3_86518964eaef84e3fdd98e9861759a1384f9c29d
deepseek-ai_DeepSeek-V3_86518964eaef84e3fdd98e9861759a1384f9c29d.logits.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1993feb3897078d1269140657755c892fdf86e68cbc040c4997a4b73b78524f8
+size 437566024
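The added file is a Git LFS pointer: the repository tracks only the object hash and size (about 437 MB), while the tensor data itself lives in LFS storage. Once the object has been pulled, the capture could be inspected lazily with safetensors' safe_open; a minimal sketch (the filename comes from this commit, everything else is illustrative):

# Sketch: list the stored metadata and a few captured tensors.
# Assumes the LFS object has been downloaded locally.
import safetensors

fn = 'deepseek-ai_DeepSeek-V3_86518964eaef84e3fdd98e9861759a1384f9c29d.logits.safetensors'
with safetensors.safe_open(fn, framework='pt') as f:
    print(f.metadata())               # model_id, revision, prompt (written by run_test.py below)
    for key in sorted(f.keys())[:5]:  # one entry per hooked module input/output
        t = f.get_tensor(key)
        print(key, tuple(t.shape), t.dtype)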
run_test.py
CHANGED
@@ -1,43 +1,51 @@
 #!/usr/bin/env python3
 import os, sys
-import transformers, torch, tqdm
+import accelerate, safetensors.torch, transformers, torch, tqdm
 
 model_id, revision = sys.argv[1:]
 user, model = model_id.split('/')
 
-
-os.makedirs(dir, exist_ok=True)
-os.chdir(dir)
+fn = f'{user}_{model}_{revision}.logits.safetensors'
 
-
+config = transformers.AutoConfig.from_pretrained(model_id, revision=revision, trust_remote_code=True)
+tokenizer = transformers.AutoTokenizer.from_pretrained(model_id, revision=revision, trust_remote_code=True)
+Model = transformers.AutoModelForCausalLM
+if config.model_type == 'deepseek_v3':
+    #Model = transformers.DeepseekV3ForCausalLM
+    pass
+model = Model.from_pretrained(model_id, trust_remote_code=True, torch_dtype='auto', device_map='cpu')
+if config.model_type == 'deepseek_v3':
+    model._supports_cache_class = False
+model = accelerate.cpu_offload(model, 'cuda:0', offload_buffers=True)
 
+pipe = transformers.pipeline('text-generation', model=model, config=config, tokenizer=tokenizer)
+
+tensors = {}
 def store_tensor(descr, tensor):
-
-    dtypestr = str(tensor.dtype).rsplit('.',1)[-1]
-    fn = f'{descr}.{shapestr}.{dtypestr}.mmap'
-    mmap = torch.from_file(fn, shared=True, dtype=tensor.dtype, size=tensor.numel())
-    mmap[:] = tensor.flatten()
-    print(fn, end='\r', flush=True)
+    tensors[descr] = tensor.cpu().detach().contiguous()
 
 IDX = 0
 module_names = {mod:name for name, mod in pipe.model.named_modules()}
+tensors = {}
 def hook(module, inputs, outputs):
     global IDX
     name = module_names[module]
     for idx, input in enumerate(inputs):
         if isinstance(input, torch.Tensor):
-            store_tensor(f'{
+            store_tensor(f'{name}.input.{idx}', input);
     if isinstance(outputs, torch.Tensor):
-        store_tensor(f'{
+        store_tensor(f'{name}.output', outputs);
     else:
         for idx, output in enumerate(outputs):
             if isinstance(output, torch.Tensor):
-                store_tensor(f'{
+                store_tensor(f'{name}.output.{idx}', output);
     IDX += 1
 
 for module in pipe.model.modules():
     module.register_forward_hook(hook)
 
-
+prompt = 'Once upon a time,'
+output = pipe(prompt, do_sample=False, max_new_tokens=1, temperature=1.0, top_p=1.0)
+safetensors.torch.save_file(tensors, fn, dict(model_id=model_id, revision=revision, prompt=prompt))
 print()
 print(output)
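With this change run_test.py loads the model once with weights held on CPU (accelerate.cpu_offload streams each module through cuda:0 during the forward pass), registers a forward hook on every module, runs a single greedy decoding step over the fixed prompt, and saves every hooked input/output tensor plus the run metadata into one safetensors file. Matching the artifact added above, the invocation would be:

python run_test.py deepseek-ai/DeepSeek-V3 86518964eaef84e3fdd98e9861759a1384f9c29d

A natural follow-up, sketched here as an assumption rather than anything shown in the commit, is diffing two such captures, e.g. a reference implementation against a candidate one, tensor by tensor:

# Sketch: compare two capture files key by key; both filenames are hypothetical.
import safetensors.torch

ref = safetensors.torch.load_file('reference.logits.safetensors')
cand = safetensors.torch.load_file('candidate.logits.safetensors')
for key in sorted(ref.keys() & cand.keys()):
    if ref[key].shape != cand[key].shape:
        print(f'{key}: shape mismatch {tuple(ref[key].shape)} vs {tuple(cand[key].shape)}')
        continue
    err = (ref[key].float() - cand[key].float()).abs().max().item()
    print(f'{key}: max abs err {err:.3e}')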