Tags: PyTorch · Safetensors · qwen2 · unsloth · trl · sft
Commit 5bf0791 (verified) by cvGod · 1 parent: 7501f58

Trained with Unsloth

config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "_name_or_path": "unsloth/deepseek-r1-distill-qwen-7b-unsloth-bnb-4bit",
+  "_name_or_path": "Mingsmilet/Qwen2.5-7B-R1-SFT",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 151646,
-  "eos_token_id": 151643,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 3584,
   "initializer_range": 0.02,
   "intermediate_size": 18944,
-  "max_position_embeddings": 131072,
+  "max_position_embeddings": 32768,
   "max_window_layers": 28,
   "model_type": "qwen2",
   "num_attention_heads": 28,
@@ -19,15 +19,13 @@
   "pad_token_id": 151654,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
-  "rope_theta": 10000,
-  "sliding_window": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": 131072,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.48.3",
-  "unsloth_fixed": true,
+  "transformers_version": "4.49.0",
   "unsloth_version": "2025.3.9",
-  "use_cache": true,
-  "use_mrope": false,
+  "use_cache": false,
   "use_sliding_window": false,
   "vocab_size": 152064
 }
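The diff above replaces the DeepSeek-R1-Distill defaults (rope_theta 10000, 131072-token context, R1 BOS/EOS ids) with Qwen2.5-style values. A minimal sketch for sanity-checking the updated fields with transformers; the repo id is taken from "_name_or_path" in the diff and may differ from the repository this commit actually lives in, so substitute your own id when loading:

```python
# Sketch only: inspect the config fields touched by this commit.
from transformers import AutoConfig

# Assumed repo id, copied from "_name_or_path" above.
config = AutoConfig.from_pretrained("Mingsmilet/Qwen2.5-7B-R1-SFT")

print(config.model_type)                 # "qwen2"
print(config.max_position_embeddings)    # 32768 (was 131072)
print(config.rope_theta)                 # 1000000.0 (was 10000)
print(config.bos_token_id, config.eos_token_id)  # 151643, 151645
print(config.use_cache)                  # False after this commit
```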
generation_config.json CHANGED
@@ -1,11 +1,15 @@
 {
-  "_from_model_config": true,
-  "bos_token_id": 151646,
+  "bos_token_id": 151643,
   "do_sample": true,
-  "eos_token_id": 151643,
-  "max_length": 131072,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "max_length": 32768,
   "pad_token_id": 151654,
-  "temperature": 0.6,
-  "top_p": 0.95,
-  "transformers_version": "4.48.3"
+  "repetition_penalty": 1.05,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
+  "transformers_version": "4.49.0"
 }
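With this change, generation stops on either 151645 or 151643 (the Qwen2 <|im_end|> / <|endoftext|> ids) and samples with temperature 0.7, top_k 20, top_p 0.8, and repetition_penalty 1.05 by default. A minimal usage sketch; the repo id and prompt are placeholders, not part of the commit, and generation_config.json is picked up automatically by generate():

```python
# Sketch only: run the model with the defaults from generation_config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Mingsmilet/Qwen2.5-7B-R1-SFT"  # assumed id, see config diff above
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id, torch_dtype=torch.float16, device_map="auto"
)

inputs = tokenizer("Explain RoPE scaling in one paragraph.", return_tensors="pt").to(model.device)
# Uses the new defaults: temperature=0.7, top_k=20, top_p=0.8, repetition_penalty=1.05.
out = model.generate(**inputs, max_new_tokens=256)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```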
pytorch_model-00001-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d32898a541b32b1a0c03337674cda452901d5203407c84eb530217335e14baa3
-size 4877685462
+oid sha256:90dd35fc5289bf8c40fc13c20242d152817af0deadc07b45af891924a2e13da2
+size 4877685654
pytorch_model-00002-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:71a6a64f65675bb0ff066ebf10d34a2a9a5b81b54d28db301db99438dd2abdfd
-size 4932779304
+oid sha256:3ac23acd8febfa7062ee7bf6f55b27139d50ed0d97c0508eb866deae24e3fd19
+size 4932779944
pytorch_model-00003-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5bda60e39248e72a46842ca20dda4e0b938da7263722a222150cd58403c0edc5
-size 4330891034
+oid sha256:978a8674e40ad70b9fd54fc08bd632d0b59257d8045e26d128df3efa93c91603
+size 4330891098
pytorch_model-00004-of-00004.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:62f1a8ae4b44ed8bd6dd997b85ffea48ce048c1e35459b1bf0125b16235aa2c3
+oid sha256:f6f1b2ce95ac135390dbf85a0b90e11cee908fae0c8a6aff9628d84f4e835998
 size 1089996165
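Each of these .bin entries is a Git LFS pointer: the repository stores only the sha256 and byte size, while the real shard is fetched from LFS storage. A minimal sketch (not part of the commit) for verifying a downloaded shard against the pointer values above:

```python
# Sketch only: check a local shard against its LFS pointer (sha256 + size).
import hashlib
from pathlib import Path

def verify_shard(path: str, expected_sha256: str, expected_size: int) -> bool:
    p = Path(path)
    if p.stat().st_size != expected_size:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_sha256

# Values from the new pointer for shard 1 of 4 above.
print(verify_shard(
    "pytorch_model-00001-of-00004.bin",
    "90dd35fc5289bf8c40fc13c20242d152817af0deadc07b45af891924a2e13da2",
    4877685654,
))
```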