wardenga committed
Commit 3ed74a8 · verified · 1 Parent(s): 05b3471

Upload config

Files changed (1):
  config.json +63 -9
config.json CHANGED
@@ -1,9 +1,63 @@
-{
-  "cl_dropout_prob": 0.1,
-  "cl_hidden_size": 32,
-  "class_weights": null,
-  "label_token_len": 3,
-  "model_name": "google/flan-t5-small",
-  "n_training_steps": null,
-  "n_warmup_steps": null
-}
+{
+  "architectures": [
+    "T5ForConditionalGeneration"
+  ],
+  "cl_dropout_prob": 0.1,
+  "cl_hidden_size": 32,
+  "classifier_dropout": 0.0,
+  "d_ff": 1024,
+  "d_kv": 64,
+  "d_model": 512,
+  "decoder_start_token_id": 0,
+  "dense_act_fn": "gelu_new",
+  "dropout_rate": 0.1,
+  "eos_token_id": 1,
+  "feed_forward_proj": "gated-gelu",
+  "initializer_factor": 1.0,
+  "is_encoder_decoder": true,
+  "is_gated_act": true,
+  "label_token_len": 3,
+  "layer_norm_epsilon": 1e-06,
+  "model_type": "t5",
+  "n_positions": 512,
+  "num_decoder_layers": 8,
+  "num_heads": 6,
+  "num_layers": 8,
+  "output_past": true,
+  "pad_token_id": 0,
+  "relative_attention_max_distance": 128,
+  "relative_attention_num_buckets": 32,
+  "task_specific_params": {
+    "summarization": {
+      "early_stopping": true,
+      "length_penalty": 2.0,
+      "max_length": 200,
+      "min_length": 30,
+      "no_repeat_ngram_size": 3,
+      "num_beams": 4,
+      "prefix": "summarize: "
+    },
+    "translation_en_to_de": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to German: "
+    },
+    "translation_en_to_fr": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to French: "
+    },
+    "translation_en_to_ro": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to Romanian: "
+    }
+  },
+  "tie_word_embeddings": false,
+  "transformers_version": "4.46.3",
+  "use_cache": true,
+  "vocab_size": 32128
+}
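
The upload replaces a minimal training-only config with a full `T5Config` matching the google/flan-t5-small architecture, while keeping the custom training keys (`cl_dropout_prob`, `cl_hidden_size`, `label_token_len`). A minimal sketch of loading the new file, not part of the commit: it assumes the file is saved locally as `config.json` and uses the standard transformers `from_json_file` API.

```python
# Minimal sketch (assumption: the updated file is available locally
# as "config.json"); checks that it parses as a standard T5Config.
from transformers import T5Config

config = T5Config.from_json_file("config.json")

print(config.model_type)   # "t5"
print(config.d_model)      # 512
print(config.num_layers)   # 8
print(config.task_specific_params["summarization"]["prefix"])  # "summarize: "

# The custom keys added for training should survive loading as extra
# attributes, since transformers keeps unknown config kwargs.
print(config.cl_hidden_size)   # 32
```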