IlyasMoutawwakil (HF staff) committed · verified
Commit 126dff9 · 1 Parent(s): af7bb11

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
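The commit message indicates the file was uploaded with huggingface_hub. Below is a minimal sketch of what such an upload could look like using the library's HfApi.upload_file; the repo id, repo type, and local path are placeholders, since none of them are shown in this commit view.

```python
from huggingface_hub import HfApi

api = HfApi()

# Placeholder repo id / repo type / local path; adjust to the actual benchmark repository.
api.upload_file(
    path_or_fileobj="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # assumption: illustrative repo id only
    repo_type="dataset",                    # assumption: could also be a model repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```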

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
   "processor": "x86_64",
   "python_version": "3.10.14",
   "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "e14790977359c910adbb4ec8a2b60fcf3ce6bd46",
+ "optimum_benchmark_commit": "b41c364fc95b27c894c71df85ec2bf4c8adb0e4d",
   "transformers_version": "4.40.2",
   "transformers_commit": null,
   "accelerate_version": "0.30.1",
@@ -99,7 +99,7 @@
   "forward": {
   "memory": {
   "unit": "MB",
- "max_ram": 942.235648,
+ "max_ram": 942.497792,
   "max_global_vram": null,
   "max_process_vram": null,
   "max_reserved": null,
@@ -108,52 +108,52 @@
   "latency": {
   "unit": "s",
   "count": 22,
- "total": 1.0167197729998065,
- "mean": 0.046214535136354845,
- "stdev": 0.0009545879214664809,
- "p50": 0.04623785249998491,
- "p90": 0.047401709199994,
- "p95": 0.04789441300000874,
- "p99": 0.047951871630004916,
+ "total": 1.0308161320000409,
+ "mean": 0.04685527872727458,
+ "stdev": 0.0010907057149416918,
+ "p50": 0.04704940450001516,
+ "p90": 0.04812063139999338,
+ "p95": 0.04814266629999366,
+ "p99": 0.04880060423998173,
   "values": [
- 0.04796064900000374,
- 0.046268268999995144,
- 0.046656853000001774,
- 0.04457056899997269,
- 0.04791885200000934,
- 0.046417766999979904,
- 0.045738072000006014,
- 0.04620261699994899,
- 0.043995709000000716,
- 0.04554459199999883,
- 0.04694472900001756,
- 0.04500633099996776,
- 0.04581982400003426,
- 0.04612738699995589,
- 0.047146443999963594,
- 0.04681222199997137,
- 0.04743007199999738,
- 0.045534714000041276,
- 0.045762847999981204,
- 0.046348889999990206,
- 0.04630492699999422,
- 0.04620743599997468
+ 0.04812950499999147,
+ 0.046657762000023695,
+ 0.04714928499998905,
+ 0.04493485399999031,
+ 0.04814335899999378,
+ 0.046655428000008214,
+ 0.045894365999998854,
+ 0.04711844800002041,
+ 0.044404659999997875,
+ 0.04745119500000783,
+ 0.048975314999978536,
+ 0.04716366200000266,
+ 0.04799852200000032,
+ 0.04804076900001064,
+ 0.04770334400001275,
+ 0.04698036100000991,
+ 0.04713560900000857,
+ 0.0462630499999932,
+ 0.045599869000000126,
+ 0.04636048100002199,
+ 0.04591160799998306,
+ 0.046144679999997607
   ]
   },
   "throughput": {
   "unit": "samples/s",
- "value": 21.638213974229636
+ "value": 21.342312481387445
   },
   "energy": {
   "unit": "kWh",
- "cpu": 1.5208998806456215e-06,
- "ram": 6.356065201863167e-08,
+ "cpu": 1.5420885167570197e-06,
+ "ram": 6.444655200697779e-08,
   "gpu": 0.0,
- "total": 1.5844605326642532e-06
+ "total": 1.6065350687639974e-06
   },
   "efficiency": {
   "unit": "samples/kWh",
- "value": 631129.6364817057
+ "value": 622457.6228948175
   }
   }
   }
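For reference, the derived metrics in this report follow directly from the raw measurements: mean latency is total/count, throughput is 1/mean (samples/s), and efficiency is 1/total energy (samples/kWh). The sketch below recomputes the updated values shown above; it assumes the file is saved locally as benchmark.json and searches for the "forward" section, since its exact nesting is not visible in this diff.

```python
import json


def find_key(obj, key):
    """Return the first value stored under `key`, searching nested dicts/lists
    (the exact nesting of "forward" inside benchmark.json is not shown here)."""
    if isinstance(obj, dict):
        if key in obj:
            return obj[key]
        for value in obj.values():
            found = find_key(value, key)
            if found is not None:
                return found
    elif isinstance(obj, list):
        for value in obj:
            found = find_key(value, key)
            if found is not None:
                return found
    return None


with open("benchmark.json") as f:  # illustrative local path
    data = json.load(f)

forward = find_key(data, "forward")
latencies = forward["latency"]["values"]
energy = forward["energy"]

mean_latency = sum(latencies) / len(latencies)  # ~0.046855 s, matches "mean"
throughput = 1.0 / mean_latency                 # ~21.34 samples/s, matches "throughput.value"
total_energy = energy["cpu"] + energy["ram"] + energy["gpu"]
efficiency = 1.0 / total_energy                 # ~622458 samples/kWh, matches "efficiency.value"

print(f"mean={mean_latency:.6f} s  throughput={throughput:.2f}/s  efficiency={efficiency:.0f}/kWh")
```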