Outer Step 2. Inner Step 240. Batch Size 476
- config.json +11 -11
- inner_optimizer.pt +1 -1
- model.safetensors +1 -1
config.json CHANGED
@@ -268,23 +268,23 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-
-
-
-
-
-
-
-
-
-
+    5148577,
+    5148581,
+    5148586,
+    5148589,
+    5148593,
+    5148597,
+    5148602,
+    5148606,
+    5148610,
+    5148614
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step":
+  "inner_step": 240,
   "inner_steps": 0,
   "last_allreduce_block": 5146835,
   "layer_norm_epsilon": 1e-05,
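This commit records training progress in the model config itself: `inner_step` advances to 240 and `block_list` picks up ten new block numbers. A minimal sketch of reading that metadata back with `huggingface_hub`, assuming the repo id `distributed/optimized-gpt2-500m` implied by the `auto_map` entry above:

```python
# Sketch only: fetch the updated config.json and read the
# distributed-training metadata this commit changes. The repo id
# is inferred from the auto_map string above and may not be exact.
import json

from huggingface_hub import hf_hub_download

config_path = hf_hub_download(
    repo_id="distributed/optimized-gpt2-500m",  # assumed repo id
    filename="config.json",
)
with open(config_path) as f:
    config = json.load(f)

print(config["inner_step"])            # 240 after this commit
print(config["last_allreduce_block"])  # 5146835
print(config["block_list"])            # [5148577, ..., 5148614]
```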
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1213de4d391e0bc516b554f91e33eed4f21fb29a664fe00449f88003fb7d299b
 size 8081781770
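The optimizer state is stored as a Git LFS pointer, so the diff only swaps the SHA-256 oid; the size stays at 8081781770 bytes. A minimal sketch of verifying a downloaded copy against this pointer, assuming a local file path:

```python
# Sketch only: check a downloaded LFS object against the pointer
# above by comparing its byte size and SHA-256 digest.
import hashlib
import os

EXPECTED_OID = "1213de4d391e0bc516b554f91e33eed4f21fb29a664fe00449f88003fb7d299b"
EXPECTED_SIZE = 8081781770

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "inner_optimizer.pt"  # assumed local copy of the LFS object
assert os.path.getsize(path) == EXPECTED_SIZE
assert sha256_of(path) == EXPECTED_OID
```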
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5811a9818bacfa4ae5f66aac2ca1f15f78527610d23d5411d3e79c5b9e397c3b
 size 4040701744
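model.safetensors is likewise an LFS pointer to the updated weights. A minimal sketch of inspecting a downloaded copy, assuming the standard safetensors layout; this is illustrative, not the repo's own loading code:

```python
# Sketch only: load the updated weights from a local copy of
# model.safetensors and print a few tensor names and shapes.
from safetensors.torch import load_file

state_dict = load_file("model.safetensors")  # assumed local copy
for name, tensor in list(state_dict.items())[:5]:
    print(name, tuple(tensor.shape), tensor.dtype)
```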