{
"eval_loss": 1.3651494408025093,
"perplexity": 3.916308264514241,
"performance_metrics": {
"accuracy": 0.9993319973279893,
"precision": 1.0,
"recall": 1.0,
"f1": 1.0,
"bleu": 0.9566939232138714,
"rouge": {
"rouge1": 0.9765294920135729,
"rouge2": 0.9763758313492451,
"rougeL": 0.9765294920135729
},
"semantic_similarity_avg": 0.9972838163375854
},
"mauve": 0.8685080885635668,
"inference_performance": {
"min_latency_ms": 54.376840591430664,
"max_latency_ms": 2085.2065086364746,
"lower_quartile_ms": 55.29165267944336,
"median_latency_ms": 55.97376823425293,
"upper_quartile_ms": 1639.3218040466309,
"avg_latency_ms": 638.3094156911235,
"min_memory_gb": 0.17970037460327148,
"max_memory_gb": 0.18018865585327148,
"lower_quartile_gb": 0.17970037460327148,
"median_memory_gb": 0.18018865585327148,
"upper_quartile_gb": 0.18018865585327148,
"avg_memory_gb": 0.17997794797918998,
"model_load_memory_gb": 2.3215994834899902,
"avg_inference_memory_gb": 0.17997794797918998
}
}