maher4488 committed
Commit 616a2e3 · verified · 1 Parent(s): 00ac6f2

Outer Step 0. Inner Step 2. Batch Size 440

Files changed (2):
  1. config.json +9 -3
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "distributed/optimized-gpt2-1b",
+  "_name_or_path": "maher4488/minor3",
   "activation_function": "gelu_new",
   "all_reduce_scores": {
     "0": "SUCCESS",
@@ -267,13 +267,19 @@
     "AutoConfig": "distributed/optimized-gpt2-500m--configuration_gpt_optimized.GPTOptimConfig",
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
-  "block_list": [],
+  "block_list": [
+    5049718,
+    5049723,
+    5049728,
+    5049732,
+    5049736
+  ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 0,
+  "inner_step": 2,
   "inner_steps": 0,
   "last_allreduce_block": 4016775,
   "layer_norm_epsilon": 1e-05,
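The substantive changes track training progress inside the config itself: inner_step moves from 0 to 2 (matching the commit message) and block_list gains five block numbers. A minimal sketch of fetching this revision's config.json and inspecting the changed fields, assuming the repo id is maher4488/minor3 (taken from the new "_name_or_path") and using the standard huggingface_hub download API:

import json

from huggingface_hub import hf_hub_download

# Download config.json pinned to this commit (revision 616a2e3).
path = hf_hub_download(
    repo_id="maher4488/minor3",   # assumed repo id, from "_name_or_path"
    filename="config.json",
    revision="616a2e3",
)

with open(path) as f:
    config = json.load(f)

# The fields touched by this commit.
print("inner_step:", config["inner_step"])        # 2 after this commit
print("block_list:", config["block_list"])        # five block numbers
print("_name_or_path:", config["_name_or_path"])  # "maher4488/minor3"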
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dcbe9e05268c20b6c5a24174c0482f755f8a38e4487e990bdcc7b98834890d4c
+oid sha256:1baf684866d64466533e4fa314d0a84cd513d443059d5a2347666ae9a6fb7858
 size 4040701744
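The model.safetensors entry is a Git LFS pointer, not the weights themselves: the repo tracks only the object's sha256 oid and byte size, so this commit swaps the oid while the size stays at 4040701744 bytes. A minimal sketch, assuming the weights have already been downloaded to a local model.safetensors, of verifying the file against the new pointer:

import hashlib
import os

# Values from the updated git-lfs pointer in this commit.
EXPECTED_OID = "1baf684866d64466533e4fa314d0a84cd513d443059d5a2347666ae9a6fb7858"
EXPECTED_SIZE = 4040701744

def verify_lfs_object(path: str) -> bool:
    """Compare a local file against a git-lfs pointer's size and sha256 oid."""
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_OID

print(verify_lfs_object("model.safetensors"))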