codelion committed on
Commit
ddbd202
·
verified ·
1 Parent(s): ca6c7fa

(Trained with Unsloth)

Browse files
Files changed (2) hide show
  1. config.json +2 -2
  2. generation_config.json +1 -1
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "architectures": [
3
  "Gemma3ForCausalLM"
4
  ],
@@ -54,9 +55,8 @@
54
  "rope_scaling": null,
55
  "rope_theta": 1000000,
56
  "sliding_window": 512,
57
- "sliding_window_pattern": 6,
58
  "torch_dtype": "float16",
59
- "transformers_version": "4.53.1",
60
  "unsloth_fixed": true,
61
  "unsloth_version": "2025.7.3",
62
  "use_cache": true,
 
1
  {
2
+ "_sliding_window_pattern": 6,
3
  "architectures": [
4
  "Gemma3ForCausalLM"
5
  ],
 
55
  "rope_scaling": null,
56
  "rope_theta": 1000000,
57
  "sliding_window": 512,
 
58
  "torch_dtype": "float16",
59
+ "transformers_version": "4.53.2",
60
  "unsloth_fixed": true,
61
  "unsloth_version": "2025.7.3",
62
  "use_cache": true,
generation_config.json CHANGED
@@ -10,5 +10,5 @@
10
  "pad_token_id": 0,
11
  "top_k": 64,
12
  "top_p": 0.95,
13
- "transformers_version": "4.53.1"
14
  }
 
10
  "pad_token_id": 0,
11
  "top_k": 64,
12
  "top_p": 0.95,
13
+ "transformers_version": "4.53.2"
14
  }