{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.629734992980957,
"base_token_generation_latency_sync": 38.651375961303714,
"base_token_generation_latency_async": 38.82457036525011,
"base_token_generation_throughput_sync": 0.025872300147895433,
"base_token_generation_throughput_async": 0.02575688515268282,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.93804702758788,
"base_inference_latency_async": 39.46812152862549,
"base_inference_throughput_sync": 0.00833763784539515,
"base_inference_throughput_async": 0.025336903842122782,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.242453575134277,
"smashed_token_generation_latency_sync": 165.535498046875,
"smashed_token_generation_latency_async": 165.71880243718624,
"smashed_token_generation_throughput_sync": 0.00604100034010124,
"smashed_token_generation_throughput_async": 0.006034318286719687,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 263.5079681396484,
"smashed_inference_latency_async": 194.88348960876465,
"smashed_inference_throughput_sync": 0.0037949516557694413,
"smashed_inference_throughput_async": 0.005131271007141419,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}
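
The file pairs each metric under a `base_` and a `smashed_` prefix, so the two model variants can be compared key by key. Below is a minimal sketch of such a comparison; the filename `model_metrics.json` and the choice of which metrics to report are assumptions for illustration, not part of the file.

```python
import json

# Load the paired base/smashed metrics (hypothetical filename).
with open("model_metrics.json") as f:
    metrics = json.load(f)

def ratio(metric: str) -> float:
    """Return the smashed/base ratio for a metric present under both prefixes."""
    return metrics[f"smashed_{metric}"] / metrics[f"base_{metric}"]

# Ratios > 1 mean the smashed model is higher on that metric
# (worse for perplexity and latency, better for throughput).
for name in (
    "perplexity",
    "token_generation_latency_sync",
    "inference_latency_sync",
    "token_generation_throughput_sync",
):
    print(f"{name}: smashed/base = {ratio(name):.3f}")
```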