FastFlowLM committed
Commit 6cd7d52 · verified · 1 Parent(s): e01f495

Initial upload

Files changed (10):
  1. .gitattributes +7 -0
  2. attn.xclbin +3 -0
  3. config.json +58 -0
  4. dequant.xclbin +3 -0
  5. layer.xclbin +3 -0
  6. lm_head.xclbin +3 -0
  7. mm.xclbin +3 -0
  8. model.q4nx +3 -0
  9. tokenizer.json +3 -0
  10. tokenizer_config.json +0 -0
.gitattributes CHANGED
@@ -33,3 +33,10 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ attn.xclbin filter=lfs diff=lfs merge=lfs -text
+ dequant.xclbin filter=lfs diff=lfs merge=lfs -text
+ layer.xclbin filter=lfs diff=lfs merge=lfs -text
+ lm_head.xclbin filter=lfs diff=lfs merge=lfs -text
+ mm.xclbin filter=lfs diff=lfs merge=lfs -text
+ model.q4nx filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
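Each rule above routes one large artifact through Git LFS, so the repository history stores a small text pointer instead of the binary itself. As a rough sketch (not part of this commit's tooling), these are the entries that `git lfs track <path>` would append for each file; the script below is illustrative only:

```python
# Rough equivalent of running `git lfs track <path>` once per uploaded file.
# The file list is taken from this commit; the script itself is hypothetical.
LFS_FILES = [
    "attn.xclbin", "dequant.xclbin", "layer.xclbin",
    "lm_head.xclbin", "mm.xclbin", "model.q4nx", "tokenizer.json",
]

with open(".gitattributes", "a", encoding="utf-8") as f:
    for path in LFS_FILES:
        # filter/diff/merge route the file through the LFS filter; -text
        # disables newline conversion for the binary content.
        f.write(f"{path} filter=lfs diff=lfs merge=lfs -text\n")
```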
attn.xclbin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9033caf687b7c87faa8d0ecda055858b8845f0c510cc6bb49c7fdaeb1cbe2bf2
+ size 465163
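This three-line stanza is the standard Git LFS pointer layout (spec v1): a `version` URL, the blob's SHA-256 `oid`, and its `size` in bytes. Every ADDED binary below follows the same layout. As a minimal sketch (the helper names here are illustrative, not part of any repo tooling), a pointer can be parsed and a fetched blob verified against it like this:

```python
import hashlib

def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"),
            "size": int(fields["size"])}

def verify_blob(pointer: dict, blob_path: str) -> bool:
    """Check a downloaded blob against the pointer's oid and size."""
    digest, size = hashlib.sha256(), 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return size == pointer["size"] and digest.hexdigest() == pointer["oid"]
```

For attn.xclbin, for example, the check passes only if the fetched file is exactly 465,163 bytes and hashes to 9033caf6….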
config.json ADDED
@@ -0,0 +1,58 @@
+ {
+   "sliding_window_pattern": 6,
+   "architectures": [
+     "Gemma3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attn_logit_softcapping": null,
+   "bos_token_id": 2,
+   "eos_token_id": 1,
+   "final_logit_softcapping": null,
+   "head_dim": 256,
+   "hidden_activation": "gelu_pytorch_tanh",
+   "hidden_size": 640,
+   "initializer_range": 0.02,
+   "intermediate_size": 2048,
+   "layer_types": [
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 32768,
+   "model_type": "gemma3_text_only",
+   "num_attention_heads": 4,
+   "num_hidden_layers": 18,
+   "num_key_value_heads": 1,
+   "pad_token_id": 0,
+   "query_pre_attn_scalar": 256,
+   "rms_norm_eps": 1e-06,
+   "rope_local_base_freq": 10000.0,
+   "rope_scaling": null,
+   "rope_theta": 1000000.0,
+   "sliding_window": 512,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.0.dev0",
+   "use_bidirectional_attention": false,
+   "use_cache": true,
+   "vocab_size": 262144,
+   "addr_qk": 9216,
+   "addr_kv": 34816,
+   "addr_kk": 13312,
+   "flm_version": "0.9.4"
+ }
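One relationship in this config worth spelling out: `sliding_window_pattern: 6` means every sixth layer uses full (global) attention while the rest attend within a 512-token sliding window, which is exactly the `layer_types` list above (full attention at zero-indexed layers 5, 11, and 17 of the 18). A small sketch of that derivation, assuming the pattern simply repeats:

```python
def derive_layer_types(num_hidden_layers: int, pattern: int) -> list[str]:
    # Every `pattern`-th layer (1-indexed) is full attention; all others
    # use sliding-window attention (512 tokens in this config).
    return ["full_attention" if (i + 1) % pattern == 0 else "sliding_attention"
            for i in range(num_hidden_layers)]

layer_types = derive_layer_types(18, 6)
assert [i for i, t in enumerate(layer_types) if t == "full_attention"] == [5, 11, 17]
```

Note also the two RoPE bases: as in upstream Gemma 3, sliding-window layers use `rope_local_base_freq` (10000.0) while full-attention layers use `rope_theta` (1000000.0). The `addr_qk`/`addr_kv`/`addr_kk` and `flm_version` keys are FastFlowLM-specific extensions, not part of the standard transformers config.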
dequant.xclbin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b529e9c91b0d9def4c5394a21920c0ba01f3d83568dde40f04e4f27034cc714
+ size 103915
layer.xclbin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:510366571fdeb69af942c9aa51bcbcc9561683dd45c3c33d93dbd2f3393cbecb
+ size 172619
lm_head.xclbin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80869836b3eecf2a43191c87355d8731358a999a1c560a5be22f9b28eae7c331
+ size 143115
mm.xclbin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2962eefe324053f9a3dac247d1ec7f27737a76eeb9e346fc1296ca1269a22ccb
+ size 218699
model.q4nx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcec9d157dcf06da83410da80599383bd6405a9a28c3325984ebeda3416b46c7
+ size 503209096
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d4046bf0505a327dd5a0abbb427ecd4fc82f99c2ceaa170bc61ecde12809b0c
+ size 33384570
tokenizer_config.json ADDED
The diff for this file is too large to render.