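# Track the sharded safetensors weights and tokenizer.json with Git LFS;
# -text marks them as binary (no text diff/merge or line-ending conversion).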
model-00001-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00002-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00003-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00004-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00005-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00006-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00007-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00008-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00009-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00010-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00011-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00012-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00013-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00014-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00015-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00016-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00017-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00018-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00019-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00020-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00021-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
model-00022-of-00022.safetensors filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text