sharpenb committed on
Commit
06706a3
·
verified ·
1 Parent(s): 5fe5153

Upload folder using huggingface_hub

Browse files
Files changed (6) hide show
  1. .gitattributes +1 -0
  2. banner.png +3 -0
  3. base_results.json +9 -9
  4. config.json +1 -1
  5. plots.png +0 -0
  6. smashed_results.json +9 -9
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ banner.png filter=lfs diff=lfs merge=lfs -text
banner.png CHANGED

Git LFS Details

  • SHA256: e685ae8397a2a718e10e56d5f0b608398bd5674d2bff527a2f224592a2016f2c
  • Pointer size: 131 Bytes
  • Size of remote file: 489 kB
base_results.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "perplexity_y_gt": 38109.7109375,
3
- "inference_elapsed_time_ms_@1": 398.0410270690918,
4
- "inference_latency_ms_@1": 39.80410270690918,
5
- "inference_throughput_batches_per_ms_@1": 0.02512303838032305,
6
- "Loading model_emissions": 7.3388832823058995e-06,
7
- "Loading model_energy_consumed": 2.5236441197042344e-05,
8
- "Inference_emissions": 1.7892046241417072e-05,
9
- "Inference_energy_consumed": 6.152592369944487e-05,
10
- "tracker_emissions": 2.896018336263697e-05,
11
- "tracker_energy_consumed": 9.958626351897996e-05,
12
  "disk_memory": 3158.1982421875
13
  }
 
1
  {
2
  "perplexity_y_gt": 38109.7109375,
3
+ "inference_elapsed_time_ms_@1": 411.52214431762695,
4
+ "inference_latency_ms_@1": 41.152214431762694,
5
+ "inference_throughput_batches_per_ms_@1": 0.024300028900222818,
6
+ "Loading model_emissions": 6.982773212868697e-06,
7
+ "Loading model_energy_consumed": 2.4011874668140837e-05,
8
+ "Inference_emissions": 1.743701285954221e-05,
9
+ "Inference_energy_consumed": 5.996118656674475e-05,
10
+ "tracker_emissions": 2.925166457244822e-05,
11
+ "tracker_energy_consumed": 0.00010058858882222871,
12
  "disk_memory": 3158.1982421875
13
  }
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/tmp/models/tmpsyoybu74tgrh01ji",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
1
  {
2
+ "_name_or_path": "/tmp/models/tmp2ydkucwg5lc33vlc",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
plots.png ADDED
smashed_results.json CHANGED
@@ -1,13 +1,13 @@
1
  {
2
  "perplexity_y_gt": 22252.1484375,
3
- "inference_elapsed_time_ms_@1": 1357.9652862548828,
4
- "inference_latency_ms_@1": 135.7965286254883,
5
- "inference_throughput_batches_per_ms_@1": 0.007363958490852802,
6
- "Loading model_emissions": 1.1571457192724436e-05,
7
- "Loading model_energy_consumed": 3.9791121860781054e-05,
8
- "Inference_emissions": 3.394370871224601e-05,
9
- "Inference_energy_consumed": 0.00011672326374115302,
10
- "tracker_emissions": 4.893639156223708e-05,
11
- "tracker_energy_consumed": 0.0001682790583457543,
12
  "disk_memory": 3150.1982421875
13
  }
 
1
  {
2
  "perplexity_y_gt": 22252.1484375,
3
+ "inference_elapsed_time_ms_@1": 1430.5411987304688,
4
+ "inference_latency_ms_@1": 143.0541198730469,
5
+ "inference_throughput_batches_per_ms_@1": 0.006990361416276918,
6
+ "Loading model_emissions": 1.3689458760454972e-05,
7
+ "Loading model_energy_consumed": 4.707435828288706e-05,
8
+ "Inference_emissions": 3.829836649971085e-05,
9
+ "Inference_energy_consumed": 0.00013169775794677133,
10
+ "tracker_emissions": 5.627399208629691e-05,
11
+ "tracker_energy_consumed": 0.00019351108848299342,
12
  "disk_memory": 3150.1982421875
13
  }