Ministral-3B-lora-BF16 / Ministral-3B-FP4-lora-BF16-Evaluation_Results.json
{
    "eval_loss": 1.3673138553469517,
    "perplexity": 3.924793959050075,
    "performance_metrics": {
        "accuracy": 0.9979959919839679,
        "precision": 1.0,
        "recall": 1.0,
        "f1": 1.0,
        "bleu": 0.9550047930990042,
        "rouge": {
            "rouge1": 0.975429473673028,
            "rouge2": 0.9751760579600208,
            "rougeL": 0.9753904091892849
        },
        "semantic_similarity_avg": 0.9966022968292236
    },
    "mauve": 0.8685080885635668,
    "inference_performance": {
        "min_latency_ms": 52.51646041870117,
        "max_latency_ms": 2623.7964630126953,
        "lower_quartile_ms": 53.77364158630371,
        "median_latency_ms": 66.0865306854248,
        "upper_quartile_ms": 1876.5153884887695,
        "avg_latency_ms": 703.9993503687775,
        "min_memory_gb": 0.17970037460327148,
        "max_memory_gb": 0.18018865585327148,
        "lower_quartile_gb": 0.17970037460327148,
        "median_memory_gb": 0.18018865585327148,
        "upper_quartile_gb": 0.18018865585327148,
        "avg_memory_gb": 0.17997794797918998,
        "model_load_memory_gb": 5.240259647369385,
        "avg_inference_memory_gb": 0.17997794797918998
    }
}
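
Note that the two top-level fields are consistent: perplexity is exp(eval_loss), and exp(1.3673) ≈ 3.9248, which matches the stored value. Below is a minimal Python sketch that loads the results and sanity-checks that relationship; the local file path and the field names follow the cleaned JSON above, and the script is illustrative, not part of the original evaluation harness.

import json
import math

# Path assumed: a local copy of the results file shown above.
with open("Ministral-3B-FP4-lora-BF16-Evaluation_Results.json") as f:
    results = json.load(f)

# Perplexity is exp(cross-entropy loss); the two stored values should agree.
assert math.isclose(results["perplexity"], math.exp(results["eval_loss"]), rel_tol=1e-6)

perf = results["inference_performance"]
print(f"eval_loss        : {results['eval_loss']:.4f}")
print(f"perplexity       : {results['perplexity']:.4f}")
print(f"median latency   : {perf['median_latency_ms']:.1f} ms")
print(f"avg inference mem: {perf['avg_inference_memory_gb'] * 1024:.1f} MiB")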