Outer Step 0. Inner Step 4. Batch Size 332
Files changed:
- config.json (+8 −2)
- model.safetensors (+1 −1)
config.json
@@ -267,13 +267,19 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [
+  "block_list": [
+    5048818,
+    5048822,
+    5048826,
+    5048830,
+    5048834
+  ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step":
+  "inner_step": 4,
   "inner_steps": 0,
   "last_allreduce_block": 4016775,
   "layer_norm_epsilon": 1e-05,
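The fields this commit touches are ordinary keys in config.json, so they can be read back once the commit is pulled. A minimal sketch, assuming the Hub repo id distributed/optimized-gpt2-500m and that the repo's custom GPTOptimConfig (hence trust_remote_code) surfaces these keys as plain attributes:

from transformers import AutoConfig

# Load the repo's custom config class; trust_remote_code is required because
# auto_map routes AutoConfig to configuration_gpt_optimized.GPTOptimConfig.
config = AutoConfig.from_pretrained(
    "distributed/optimized-gpt2-500m",
    trust_remote_code=True,
)

print(config.inner_step)   # 4 after this commit
print(config.block_list)   # [5048818, 5048822, 5048826, 5048830, 5048834]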
model.safetensors
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a4e605b04ff3e3ae191563ea99bfe1a7b3ec04e0a9e48a5345fc93803cb1bb98
 size 4040701744
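model.safetensors is stored through Git LFS, so the diff above only replaces the pointer file; the oid line is the SHA-256 of the full 4,040,701,744-byte weights blob. A standard-library sketch for checking a local download against it (the local filename is an assumption):

import hashlib
import os

EXPECTED_OID = "a4e605b04ff3e3ae191563ea99bfe1a7b3ec04e0a9e48a5345fc93803cb1bb98"
EXPECTED_SIZE = 4040701744

assert os.path.getsize("model.safetensors") == EXPECTED_SIZE, "size mismatch"

digest = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    # Stream in 1 MiB chunks so the ~4 GB file never sits in memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_OID, "hash mismatch: stale or corrupt download"
print("model.safetensors matches the LFS pointer")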