{
"eval_loss": 1.329510751245674,
"perplexity": 3.7791939692510548,
"performance_metrics": {
"accuracy": 0.9993319973279893,
"precision": 1.0,
"recall": 1.0,
"f1": 1.0,
"bleu": 0.9535680157347896,
"rouge": {
"rouge1": 0.9747836415742681,
"rouge2": 0.974612051335199,
"rougeL": 0.9747836415742681
},
"semantic_similarity_avg": 0.9974035620689392
},
"mauve": 0.8685080885635668,
"inference_performance": {
"min_latency_ms": 80.57045936584473,
"max_latency_ms": 4091.404914855957,
"lower_quartile_ms": 83.80317687988281,
"median_latency_ms": 97.87654876708984,
"upper_quartile_ms": 3127.7430057525635,
"avg_latency_ms": 1141.5406608071896,
"min_memory_gb": 0.1100916862487793,
"max_memory_gb": 0.286313533782959,
"lower_quartile_gb": 0.13838720321655273,
"median_memory_gb": 0.18627500534057617,
"upper_quartile_gb": 0.26760435104370117,
"avg_memory_gb": 0.197617789786421,
"model_load_memory_gb": 6.765058517456055,
"avg_inference_memory_gb": 0.197617789786421
}
}