{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 79.16311569213867,
"base_token_generation_latency_async": 79.15753591805696,
"base_token_generation_throughput_sync": 0.012632145554868622,
"base_token_generation_throughput_async": 0.012633035988325728,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 77.93448944091797,
"base_inference_latency_async": 77.57904529571533,
"base_inference_throughput_sync": 0.01283128955066933,
"base_inference_throughput_async": 0.012890078708602383,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 152.54314575195312,
"smashed_token_generation_latency_async": 152.70157847553492,
"smashed_token_generation_throughput_sync": 0.006555522341371384,
"smashed_token_generation_throughput_async": 0.006548720779335068,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 153.68970184326173,
"smashed_inference_latency_async": 153.36310863494873,
"smashed_inference_throughput_sync": 0.00650661682602414,
"smashed_inference_throughput_async": 0.00652047294098809,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}