hasibirok0 committed (verified)
Commit eb00427 · 1 parent: 6cd5a6d

Model save

README.md CHANGED
@@ -36,11 +36,11 @@ More information needed
 
 The following hyperparameters were used during training:
 - learning_rate: 0.0002
-- train_batch_size: 32
+- train_batch_size: 16
 - eval_batch_size: 8
 - seed: 42
 - gradient_accumulation_steps: 4
-- total_train_batch_size: 128
+- total_train_batch_size: 64
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: cosine
 - lr_scheduler_warmup_steps: 20
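The batch-size change is internally consistent: the effective batch size is the per-device batch size times the gradient accumulation steps, so 16 × 4 = 64 (previously 32 × 4 = 128), assuming a single device. Below is a minimal sketch of how these hyperparameters would map onto `transformers.TrainingArguments`; the `output_dir` is an assumption, not taken from this repo.

```python
# Minimal sketch, assuming a single-device transformers Trainer setup;
# output_dir is an assumption, not taken from this repo.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="model_fold_0_batch0",  # assumed to match the adapter folder in this commit
    learning_rate=2e-4,
    per_device_train_batch_size=16,    # changed from 32 in this commit
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=4,
    seed=42,
    lr_scheduler_type="cosine",
    warmup_steps=20,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)

# Effective (total) train batch size on one device:
# 16 per device * 4 accumulation steps = 64, matching the new total_train_batch_size.
```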
model_fold_0_batch0/adapter_config.json CHANGED
@@ -40,8 +40,8 @@
 "rank_pattern": {},
 "revision": null,
 "target_modules": [
-  "v_proj",
   "q_proj",
+  "v_proj",
   "k_proj"
 ],
 "task_type": "SEQ_CLS",
model_fold_0_batch0/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d2e88a260e9d2641c42dfd7058e465462713bcdae4023b5d890356f1e64a778f
+oid sha256:1be428f0b31a3975a1449bdd88293a28c3058a9fd4c06e3a71f730300c3f8572
 size 4780504
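Only the LFS pointer (the sha256 of the retrained adapter weights) changes here; the file size is identical. A hedged sketch of how the updated adapter might be loaded for inference; the base checkpoint name and num_labels are placeholders, since neither appears in this diff:

```python
# Minimal sketch; the base checkpoint and num_labels are placeholder assumptions.
from transformers import AutoModelForSequenceClassification
from peft import PeftModel

base = AutoModelForSequenceClassification.from_pretrained(
    "base-model-name",  # placeholder: the base checkpoint is not named in this diff
    num_labels=2,       # placeholder
)
model = PeftModel.from_pretrained(base, "model_fold_0_batch0")
```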
model_fold_0_batch0/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b093f9a99f5bcfbe28a31dcc057e5e9dff06005a60705aed8ee673e56a7ec907
+oid sha256:cdbefeb2bdec2011894dc6af7b992e7a7a555c67bc61e6df035bb68e9a5a4817
 size 5496