theta committed
Commit deef725 · 1 Parent(s): 22503eb

Training in progress, step 800

config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "gpt2",
+ "_name_or_path": "uer/gpt2-chinese-cluecorpussmall",
  "activation_function": "gelu_new",
  "architectures": [
  "GPT2LMHeadModel"
@@ -8,6 +8,7 @@
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
+ "gradient_checkpointing": false,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
@@ -17,6 +18,7 @@
  "n_inner": null,
  "n_layer": 12,
  "n_positions": 1024,
+ "output_past": true,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
@@ -29,11 +31,12 @@
  "task_specific_params": {
  "text-generation": {
  "do_sample": true,
- "max_length": 50
+ "max_length": 320
  }
  },
+ "tokenizer_class": "BertTokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.25.1",
  "use_cache": true,
- "vocab_size": 50257
+ "vocab_size": 21128
  }
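The updated config points the checkpoint at the uer/gpt2-chinese-cluecorpussmall base, switches to a BERT-style tokenizer, and shrinks the vocabulary to 21128 tokens. A minimal loading sketch follows, assuming the checkpoint is consumed with the transformers library as the "tokenizer_class" and "architectures" fields suggest; the repo id and the prompt are placeholders, not part of this commit.

# Sketch only: load a checkpoint with this config via transformers.
from transformers import BertTokenizer, GPT2LMHeadModel, TextGenerationPipeline

repo_id = "uer/gpt2-chinese-cluecorpussmall"  # base model named in "_name_or_path"; swap in this repo if desired
tokenizer = BertTokenizer.from_pretrained(repo_id)   # matches "tokenizer_class": "BertTokenizer"
model = GPT2LMHeadModel.from_pretrained(repo_id)     # matches "architectures": ["GPT2LMHeadModel"]

# "task_specific_params" now enables sampling with max_length=320 for text generation.
generator = TextGenerationPipeline(model, tokenizer)
print(generator("这是一个测试", max_length=320, do_sample=True))  # placeholder prompt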
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9d6a6bc5019e4292d10dbfe3e6ea9dd0de5eb4440d6471ec5fab3c251e41071e
- size 510398013
+ oid sha256:25ad785bbfc2b5826e165af144afa2a5ef7ab39619321e755f70c893e3d7c10e
+ size 420913725
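The weights are stored as a Git LFS pointer: the oid is the SHA-256 of the actual binary and size is its byte count. The 89,484,288-byte drop matches the smaller embedding matrix exactly ((50257 - 21128) tokens × 768 dims × 4 bytes for float32). A hedged verification sketch, assuming pytorch_model.bin has been downloaded to the working directory:

# Sketch: check a downloaded pytorch_model.bin against the new LFS pointer above.
import hashlib, os

path = "pytorch_model.bin"  # assumed local download location
expected_oid = "25ad785bbfc2b5826e165af144afa2a5ef7ab39619321e755f70c893e3d7c10e"
expected_size = 420913725

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match LFS pointer"
assert sha.hexdigest() == expected_oid, "sha256 does not match LFS pointer"
print("pytorch_model.bin matches the LFS pointer")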
runs/Dec14_05-08-17_139d60cc267c/events.out.tfevents.1670994674.139d60cc267c.75.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:788d9212d244af31ce753e367968a49c919085b61c20d37ea2653dbe49950568
- size 4664
+ oid sha256:def682bbd7c926f25bd4f115c5d11b0abcac9b5aae1462e3951bafc274842a88
+ size 5948
runs/Dec14_06-25-22_139d60cc267c/1670999127.2919536/events.out.tfevents.1670999127.139d60cc267c.75.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9fa92b7b8bfd234316e393cb1607c6dad54db70489d99e02ac276b85bb7f7ace
+ size 5521
runs/Dec14_06-25-22_139d60cc267c/events.out.tfevents.1670999127.139d60cc267c.75.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3eda536d57b651231826cb48ae561a24a60884ecb27d0bf7c92dca3a0e5e715a
+ size 4946
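The files under runs/ are TensorBoard event logs written during training (also stored as LFS pointers). A hedged sketch for inspecting them locally; the scalar tag "train/loss" is an assumption about what was logged, so the available tags are printed first:

# Sketch: read scalars from the event files added under runs/.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Dec14_06-25-22_139d60cc267c")  # assumed local checkout path
ea.Reload()
print(ea.Tags()["scalars"])          # list the scalar tags actually present
for event in ea.Scalars("train/loss"):  # tag name is an assumption
    print(event.step, event.value)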
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9ad0374218e0788ae338a43bb7bcf34000f492bd968256202517880a881287b3
+ oid sha256:0690e1e201f43df2f21f1ae2ccd47285120f160e49c06029607dc8d840ab74bd
  size 3451
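training_args.bin is the TrainingArguments object that the Trainer pickles next to each checkpoint; the byte size is unchanged but the hash differs, so some argument values changed between runs. A hedged inspection sketch, assuming the file is downloaded locally; the printed fields are standard TrainingArguments attributes, not values taken from this commit:

# Sketch: inspect the pickled TrainingArguments.
import torch

args = torch.load("training_args.bin", weights_only=False)  # weights_only needs to be False for pickled objects
print(type(args).__name__)  # expected: TrainingArguments
print(args.output_dir, args.per_device_train_batch_size, args.learning_rate)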