File size: 877 Bytes
4c3fb28
{
    "current_gpu_type": "Tesla T4",
    "current_gpu_total_memory": 15095.0625,
    "perplexity": 3.4586403369903564,
    "memory_inference_first": 810.0,
    "memory_inference": 808.0,
    "token_generation_latency_sync": 37.52931938171387,
    "token_generation_latency_async": 37.49031759798527,
    "token_generation_throughput_sync": 0.02664583361688273,
    "token_generation_throughput_async": 0.026673553708537804,
    "token_generation_CO2_emissions": 1.9795767676085165e-05,
    "token_generation_energy_consumption": 0.0018361529394885828,
    "inference_latency_sync": 122.5987979888916,
    "inference_latency_async": 48.247456550598145,
    "inference_throughput_sync": 0.008156686822415727,
    "inference_throughput_async": 0.020726481176293273,
    "inference_CO2_emissions": 1.9329542916591693e-05,
    "inference_energy_consumption": 6.80833538907916e-05
}
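For convenience, below is a minimal sketch of how these results could be read back and sanity-checked in Python. The filename "benchmark_results.json" is an assumption, not part of the source, and the reciprocal relationship between the *_latency_* and *_throughput_* fields is inferred from the values above (e.g. 1 / 37.529 ≈ 0.02665); the units are not stated in the file itself.

import json

# Assumed filename; the JSON above would be saved under this name.
with open("benchmark_results.json") as f:
    results = json.load(f)

print(f"GPU: {results['current_gpu_type']} "
      f"({results['current_gpu_total_memory']:.0f} total memory)")
print(f"Perplexity: {results['perplexity']:.3f}")

# The throughput fields appear to be the reciprocals of the matching
# latency fields, so a quick consistency check can be derived directly
# from the data (this relationship is an observation, not documented).
for phase in ("token_generation", "inference"):
    for mode in ("sync", "async"):
        latency = results[f"{phase}_latency_{mode}"]
        throughput = results[f"{phase}_throughput_{mode}"]
        print(f"{phase} ({mode}): latency={latency:.2f}, "
              f"throughput={throughput:.5f}, 1/latency={1 / latency:.5f}")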