IlyasMoutawwakil (HF Staff) committed
Commit 1c3d23d · verified · 1 Parent(s): 3a955f9

Upload cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json with huggingface_hub
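For context, a minimal sketch of how a file like this one can be pushed to the Hub with huggingface_hub's upload_file; the repository id and repo_type below are placeholders, not values taken from this commit:

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo="cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json",
    repo_id="<namespace>/<repo>",      # placeholder: the target repository is not shown in this commit
    repo_type="dataset",               # assumption: benchmark results are often stored in dataset repos
    commit_message="Upload cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json with huggingface_hub",
)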

cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json ADDED
@@ -0,0 +1,598 @@
+ {
+ "config": {
+ "name": "cpu_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel",
+ "backend": {
+ "name": "pytorch",
+ "version": "2.5.1",
+ "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
+ "model": "hf-internal-testing/tiny-random-BertModel",
+ "processor": "hf-internal-testing/tiny-random-BertModel",
+ "task": "fill-mask",
+ "library": "transformers",
+ "model_type": "bert",
+ "device": "cpu",
+ "device_ids": null,
+ "seed": 42,
+ "inter_op_num_threads": null,
+ "intra_op_num_threads": null,
+ "model_kwargs": {},
+ "processor_kwargs": {},
+ "no_weights": true,
+ "tp_plan": null,
+ "device_map": null,
+ "torch_dtype": null,
+ "eval_mode": true,
+ "to_bettertransformer": false,
+ "low_cpu_mem_usage": null,
+ "attn_implementation": null,
+ "cache_implementation": null,
+ "allow_tf32": false,
+ "autocast_enabled": false,
+ "autocast_dtype": null,
+ "torch_compile": false,
+ "torch_compile_target": "forward",
+ "torch_compile_config": {},
+ "quantization_scheme": null,
+ "quantization_config": {},
+ "deepspeed_inference": false,
+ "deepspeed_inference_config": {},
+ "peft_type": null,
+ "peft_config": {}
+ },
+ "scenario": {
+ "name": "inference",
+ "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
+ "iterations": 1,
+ "duration": 1,
+ "warmup_runs": 1,
+ "input_shapes": {
+ "batch_size": 2,
+ "sequence_length": 16,
+ "num_choices": 2
+ },
+ "new_tokens": null,
+ "memory": true,
+ "latency": true,
+ "energy": true,
+ "forward_kwargs": {},
+ "generate_kwargs": {
+ "max_new_tokens": 2,
+ "min_new_tokens": 2
+ },
+ "call_kwargs": {
+ "num_inference_steps": 2
+ }
+ },
+ "launcher": {
+ "name": "process",
+ "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
+ "device_isolation": false,
+ "device_isolation_action": null,
+ "numactl": false,
+ "numactl_kwargs": {},
+ "start_method": "spawn"
+ },
+ "environment": {
+ "cpu": " AMD EPYC 7742 64-Core Processor",
+ "cpu_count": 128,
+ "cpu_ram_mb": 540671.643648,
+ "system": "Linux",
+ "machine": "x86_64",
+ "platform": "Linux-5.4.0-166-generic-x86_64-with-glibc2.31",
+ "processor": "x86_64",
+ "python_version": "3.10.18",
+ "gpu": [
+ "NVIDIA A100-SXM4-80GB",
+ "NVIDIA A100-SXM4-80GB",
+ "NVIDIA A100-SXM4-80GB",
+ "NVIDIA DGX Display",
+ "NVIDIA A100-SXM4-80GB"
+ ],
+ "gpu_count": 5,
+ "gpu_vram_mb": 347892350976,
+ "optimum_benchmark_version": "0.6.0.dev0",
+ "optimum_benchmark_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
+ "transformers_version": "4.45.1",
+ "transformers_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
+ "accelerate_version": "1.10.0",
+ "accelerate_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
+ "diffusers_version": "0.34.0",
+ "diffusers_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
+ "optimum_version": null,
+ "optimum_commit": null,
+ "timm_version": "1.0.19",
+ "timm_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db",
+ "peft_version": "0.17.0",
+ "peft_commit": "61a08086def388b3e78bbf6b42ed20ab4af3f8db"
+ },
+ "print_report": true,
+ "log_report": true
+ },
+ "report": {
+ "load_model": {
+ "memory": {
+ "unit": "MB",
+ "max_ram": 679.559168,
+ "max_global_vram": null,
+ "max_process_vram": null,
+ "max_reserved": null,
+ "max_allocated": null
+ },
+ "latency": {
+ "unit": "s",
+ "values": [
+ 0.14881402999162674
+ ],
+ "count": 1,
+ "total": 0.14881402999162674,
+ "mean": 0.14881402999162674,
+ "p50": 0.14881402999162674,
+ "p90": 0.14881402999162674,
+ "p95": 0.14881402999162674,
+ "p99": 0.14881402999162674,
+ "stdev": 0,
+ "stdev_": 0
+ },
+ "throughput": null,
+ "energy": {
+ "unit": "kWh",
+ "cpu": 0.00015839945019979496,
+ "ram": 0.0002658581267750873,
+ "gpu": 0.00040478087937856344,
+ "total": 0.0008290384563534457
+ },
+ "efficiency": null
+ },
+ "first_forward": {
+ "memory": {
+ "unit": "MB",
+ "max_ram": 680.267776,
+ "max_global_vram": null,
+ "max_process_vram": null,
+ "max_reserved": null,
+ "max_allocated": null
+ },
+ "latency": {
+ "unit": "s",
+ "values": [
+ 0.008422935847193003
+ ],
+ "count": 1,
+ "total": 0.008422935847193003,
+ "mean": 0.008422935847193003,
+ "p50": 0.008422935847193003,
+ "p90": 0.008422935847193003,
+ "p95": 0.008422935847193003,
+ "p99": 0.008422935847193003,
+ "stdev": 0,
+ "stdev_": 0
+ },
+ "throughput": null,
+ "energy": {
+ "unit": "kWh",
+ "cpu": 0.0001545904000668088,
+ "ram": 0.0002594645603005972,
+ "gpu": 0.00038416502955840315,
+ "total": 0.000798219989925809
+ },
+ "efficiency": null
+ },
+ "forward": {
+ "memory": {
+ "unit": "MB",
+ "max_ram": 687.833088,
+ "max_global_vram": null,
+ "max_process_vram": null,
+ "max_reserved": null,
+ "max_allocated": null
+ },
+ "latency": {
+ "unit": "s",
+ "values": [
+ 0.002831811085343361,
+ 0.0027529229409992695,
+ 0.0027246088720858097,
+ 0.0028367098420858383,
+ 0.0027337572537362576,
+ 0.002710293047130108,
+ 0.0027137091383337975,
+ 0.002639930695295334,
+ 0.0026258742436766624,
+ 0.002674704883247614,
+ 0.002695303875952959,
+ 0.0027544950135052204,
+ 0.0027153408154845238,
+ 0.002661501057446003,
+ 0.0026737446896731853,
+ 0.002680666744709015,
+ 0.0026717609725892544,
+ 0.0026349108666181564,
+ 0.0027218740433454514,
+ 0.0026273271068930626,
+ 0.0025601410306990147,
+ 0.0025830939412117004,
+ 0.0026146029122173786,
+ 0.0025855777785182,
+ 0.0025835949927568436,
+ 0.0025653610937297344,
+ 0.0026709591038525105,
+ 0.0026035429909825325,
+ 0.002610635943710804,
+ 0.002631494775414467,
+ 0.002649869304150343,
+ 0.002667392138391733,
+ 0.0026115477085113525,
+ 0.002584747038781643,
+ 0.0026388089172542095,
+ 0.00255892938002944,
+ 0.0025786347687244415,
+ 0.0025820820592343807,
+ 0.002565390896052122,
+ 0.002562745939940214,
+ 0.0026137609966099262,
+ 0.0026076999492943287,
+ 0.002572102937847376,
+ 0.002562555018812418,
+ 0.002585679292678833,
+ 0.0026052147150039673,
+ 0.0025855586864054203,
+ 0.0025789663195610046,
+ 0.0026012081652879715,
+ 0.00263608293607831,
+ 0.002570010256022215,
+ 0.002641713712364435,
+ 0.0026368550024926662,
+ 0.0026613399386405945,
+ 0.0026696957647800446,
+ 0.002653926145285368,
+ 0.0026658792048692703,
+ 0.0027573509141802788,
+ 0.002686257939785719,
+ 0.002671870868653059,
+ 0.002650290261954069,
+ 0.0025801178999245167,
+ 0.0025781039148569107,
+ 0.0025948258116841316,
+ 0.0026422650553286076,
+ 0.002637235913425684,
+ 0.0026484872214496136,
+ 0.002593553625047207,
+ 0.0025963387452065945,
+ 0.0025863200426101685,
+ 0.0025483090430498123,
+ 0.002589415293186903,
+ 0.0027642627246677876,
+ 0.002826390787959099,
+ 0.002728687133640051,
+ 0.0027176570147275925,
+ 0.0026821200735867023,
+ 0.002717094961553812,
+ 0.0027332347817718983,
+ 0.002709541004151106,
+ 0.0028211409226059914,
+ 0.0027777189388871193,
+ 0.0027347891591489315,
+ 0.0027413908392190933,
+ 0.002716194372624159,
+ 0.0026784627698361874,
+ 0.0026074089109897614,
+ 0.0027065258473157883,
+ 0.002648227382451296,
+ 0.002652904950082302,
+ 0.002605196088552475,
+ 0.0025926423259079456,
+ 0.002583986148238182,
+ 0.0026267259381711483,
+ 0.0025806291960179806,
+ 0.002675727941095829,
+ 0.0026479260995984077,
+ 0.00265714293345809,
+ 0.0026852251030504704,
+ 0.0026663709431886673,
+ 0.0026177302934229374,
+ 0.002601148094981909,
+ 0.002701906953006983,
+ 0.0025688670575618744,
+ 0.0025847270153462887,
+ 0.0026042540557682514,
+ 0.002581731416285038,
+ 0.0025981119833886623,
+ 0.0025748591870069504,
+ 0.002553307916969061,
+ 0.0026549389585852623,
+ 0.002589976880699396,
+ 0.0025946958921849728,
+ 0.0025792871601879597,
+ 0.002557766158133745,
+ 0.002560011111199856,
+ 0.0026106061413884163,
+ 0.00258822413161397,
+ 0.0026662801392376423,
+ 0.0026261350139975548,
+ 0.0026178103871643543,
+ 0.0025977017357945442,
+ 0.00260597700253129,
+ 0.002601419109851122,
+ 0.002598542720079422,
+ 0.0026544490829110146,
+ 0.00261505413800478,
+ 0.002621215768158436,
+ 0.002593092620372772,
+ 0.00259593827649951,
+ 0.0026677530258893967,
+ 0.002680005971342325,
+ 0.0026747160591185093,
+ 0.0028274422511458397,
+ 0.002778911031782627,
+ 0.0027624801732599735,
+ 0.002755246590822935,
+ 0.0027657868340611458,
+ 0.002757551148533821,
+ 0.002725029829889536,
+ 0.00284017575904727,
+ 0.0027232267893850803,
+ 0.00271938880905509,
+ 0.0026777819730341434,
+ 0.002665778622031212,
+ 0.0026338500902056694,
+ 0.002633708994835615,
+ 0.0026066978462040424,
+ 0.002645730972290039,
+ 0.002599785104393959,
+ 0.002630883827805519,
+ 0.0026519829407334328,
+ 0.00270979106426239,
+ 0.002751339226961136,
+ 0.0026912870816886425,
+ 0.0027004238218069077,
+ 0.0026159859262406826,
+ 0.0025703287683427334,
+ 0.0025942251086235046,
+ 0.002606278285384178,
+ 0.002604664769023657,
+ 0.0025924108922481537,
+ 0.00259727006778121,
+ 0.0026735030114650726,
+ 0.002579587046056986,
+ 0.0025506732054054737,
+ 0.002545243129134178,
+ 0.0025887549854815006,
+ 0.0025591892190277576,
+ 0.0025789160281419754,
+ 0.0025969198904931545,
+ 0.0026767291128635406,
+ 0.0026505510322749615,
+ 0.0026231491938233376,
+ 0.0025825733318924904,
+ 0.002604193054139614,
+ 0.0026407232508063316,
+ 0.002597931306809187,
+ 0.0026321657933294773,
+ 0.002742283046245575,
+ 0.0027189780957996845,
+ 0.0027602771297097206,
+ 0.0027607372030615807,
+ 0.0026779319159686565,
+ 0.0026023900136351585,
+ 0.002625253051519394,
+ 0.0026576551608741283,
+ 0.002606980036944151,
+ 0.002618159167468548,
+ 0.0025986325927078724,
+ 0.00259243231266737,
+ 0.002606297843158245,
+ 0.002581820823252201,
+ 0.0026193922385573387,
+ 0.002694672904908657,
+ 0.002622256986796856,
+ 0.0026164259761571884,
+ 0.0026205149479210377,
+ 0.0026259650476276875,
+ 0.002641574013978243,
+ 0.0026219268329441547,
+ 0.002665828913450241,
+ 0.0026539969258010387,
+ 0.0026247319765388966,
+ 0.002590416930615902,
+ 0.0026114871725440025,
+ 0.002616936806589365,
+ 0.002601799089461565,
+ 0.0026082410477101803,
+ 0.002683452796190977,
+ 0.0026596570387482643,
+ 0.002697909716516733,
+ 0.0027176872827112675,
+ 0.0027074976824223995,
+ 0.0027302498929202557,
+ 0.0027610273100435734,
+ 0.002785805147141218,
+ 0.00276709022000432,
+ 0.0027224458754062653,
+ 0.0026900148950517178,
+ 0.002702157013118267,
+ 0.002749084960669279,
+ 0.0027403589338064194,
+ 0.0027441363781690598,
+ 0.0027553769759833813,
+ 0.002630012109875679,
+ 0.0026188911870121956,
+ 0.0026341299526393414,
+ 0.002647495362907648,
+ 0.002632095944136381,
+ 0.002622507978230715,
+ 0.002701836172491312,
+ 0.0026323669590055943,
+ 0.0026261149905622005,
+ 0.0025915498845279217,
+ 0.002607117872685194,
+ 0.0026581836864352226,
+ 0.002740319352596998,
+ 0.002685976680368185,
+ 0.0026982896961271763,
+ 0.002615605015307665,
+ 0.002637697383761406,
+ 0.00261252885684371,
+ 0.002633138094097376,
+ 0.002654068171977997,
+ 0.0026060566306114197,
+ 0.002590127754956484,
+ 0.0026873089373111725,
+ 0.002620665356516838,
+ 0.0026073800399899483,
+ 0.002676489297300577,
+ 0.0026698061265051365,
+ 0.0026501002721488476,
+ 0.0026447600685060024,
+ 0.0026764790527522564,
+ 0.00260948296636343,
+ 0.0026153340004384518,
+ 0.0026255641132593155,
+ 0.0026244018226861954,
+ 0.0026540779508650303,
+ 0.0026348107494413853,
+ 0.0026372959837317467,
+ 0.002701575867831707,
+ 0.0026408019475638866,
+ 0.0026236302219331264,
+ 0.002644078340381384,
+ 0.0026295706629753113,
+ 0.002660319209098816,
+ 0.0026665409095585346,
+ 0.0026920479722321033,
+ 0.0026878509670495987,
+ 0.002672250848263502,
+ 0.002675988245755434,
+ 0.0026738038286566734,
+ 0.002720171120017767,
+ 0.0027068760246038437,
+ 0.0027110143564641476,
+ 0.00283443508669734,
+ 0.0027134385891258717,
+ 0.0027259723283350468,
+ 0.0026616910472512245,
+ 0.00266280397772789,
+ 0.002625504042953253,
+ 0.0026272074319422245,
+ 0.002691878005862236,
+ 0.002691968809813261,
+ 0.0026845536194741726,
+ 0.0026857461780309677,
+ 0.0026493482291698456,
+ 0.0026848549023270607,
+ 0.0026484858244657516,
+ 0.0026492690667510033,
+ 0.0027198009192943573,
+ 0.002644097898155451,
+ 0.002560941968113184,
+ 0.0025781849399209023,
+ 0.0025786557234823704,
+ 0.002567645162343979,
+ 0.0025629960000514984,
+ 0.0025580767542123795,
+ 0.002608812879770994,
+ 0.002560561988502741,
+ 0.0025816713459789753,
+ 0.0025675445795059204,
+ 0.0025757201947271824,
+ 0.0025894762948155403,
+ 0.002610113937407732,
+ 0.0026615308597683907,
+ 0.0026047746650874615,
+ 0.002584496047347784,
+ 0.0025961389765143394,
+ 0.00257821474224329,
+ 0.0026288102380931377,
+ 0.0026331888511776924,
+ 0.002625604160130024,
+ 0.0026751761324703693,
+ 0.0025922409258782864,
+ 0.002580889966338873,
+ 0.002572744619101286,
+ 0.002562996931374073,
+ 0.0025603421963751316,
+ 0.002570711076259613,
+ 0.002588694915175438,
+ 0.002619482111185789,
+ 0.0025880038738250732,
+ 0.002567254938185215,
+ 0.0025802291929721832,
+ 0.0025844271294772625,
+ 0.002586841117590666,
+ 0.0025745779275894165,
+ 0.0025813300162553787,
+ 0.002612078096717596,
+ 0.0025806589983403683,
+ 0.0025669229216873646,
+ 0.0025774440728127956,
+ 0.0025810301303863525,
+ 0.0025612828321754932,
+ 0.002566984388977289,
+ 0.0025728149339556694,
+ 0.0026128599420189857,
+ 0.0025754603557288647,
+ 0.002572544850409031,
+ 0.002579357009381056,
+ 0.002562706358730793,
+ 0.0025800177827477455,
+ 0.0025769220665097237,
+ 0.0026219473220407963,
+ 0.0025798678398132324,
+ 0.0025990940630435944,
+ 0.002597521059215069,
+ 0.002593433950096369,
+ 0.00259942514821887,
+ 0.002599504776299,
+ 0.0025898870080709457,
+ 0.002639380283653736,
+ 0.002604834735393524,
+ 0.0025976509787142277,
+ 0.002606858965009451,
+ 0.002601419109851122,
+ 0.0026102247647941113,
+ 0.0026018288917839527,
+ 0.002616487443447113,
+ 0.0026577343232929707,
+ 0.0026196730323135853,
+ 0.0026202131994068623,
+ 0.002624782267957926,
+ 0.0026039928197860718,
+ 0.002626976929605007,
+ 0.0026326170191168785,
+ 0.0026807766407728195,
+ 0.0026374259032309055,
+ 0.002629511058330536,
+ 0.002639159094542265,
+ 0.0026420247741043568,
+ 0.002641173079609871,
+ 0.0026397807523608208,
+ 0.002645079977810383,
+ 0.002682761289179325
+ ],
+ "count": 378,
+ "total": 0.998018033336848,
+ "mean": 0.002640259347451979,
+ "p50": 0.0026264304760843515,
+ "p90": 0.0027236414141952993,
+ "p95": 0.0027556730667129157,
+ "p99": 0.0028284470830112697,
+ "stdev": 5.993870029621387e-05,
+ "stdev_": 2.270182296828476
+ },
+ "throughput": {
+ "unit": "samples/s",
+ "value": 757.5013424079455
+ },
+ "energy": {
+ "unit": "kWh",
+ "cpu": 8.709281649729789e-08,
+ "ram": 1.461470135525111e-07,
+ "gpu": 2.1126912600919842e-07,
+ "total": 4.4450895605900725e-07
+ },
+ "efficiency": {
+ "unit": "samples/kWh",
+ "value": 4499346.914698623
+ }
+ }
+ }
+ }