{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 52.690575790405276,
"base_token_generation_latency_async": 52.724254317581654,
"base_token_generation_throughput_sync": 0.018978725986556703,
"base_token_generation_throughput_async": 0.018966602997864224,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 52.11729965209961,
"base_inference_latency_async": 52.47912406921387,
"base_inference_throughput_sync": 0.019187486816763993,
"base_inference_throughput_async": 0.01905519609437681,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 87.81638412475586,
"smashed_token_generation_latency_async": 88.34282588213682,
"smashed_token_generation_throughput_sync": 0.01138739666824992,
"smashed_token_generation_throughput_async": 0.011319538287513654,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 92.48798599243165,
"smashed_inference_latency_async": 85.88974475860596,
"smashed_inference_throughput_sync": 0.01081221511388334,
"smashed_inference_throughput_async": 0.01164283352815299,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}