{
"config": {
"model_name": "meta-llama/Llama-3.1-70B-Instruct",
"link": "https://huggingface.co/meta-llama/Llama-3.1-70B-Instruct",
"Params": 70
},
"results": {
"Overall": {
"CR": "71.8",
"S-Acc": "27.4",
"EM": "0.5",
"PM-0.5": "21.9",
"Tokens": "2090"
},
"Acrostic": {
"CR": "84.0",
"S-Acc": "35.8",
"EM": "0.0",
"PM-0.5": "21.0",
"Tokens": "3565"
},
"Crossword": {
"CR": "77.3",
"S-Acc": "46.8",
"EM": "0.0",
"PM-0.5": "62.0",
"Tokens": "3072"
},
"Cryptogram": {
"CR": "62.0",
"S-Acc": "6.9",
"EM": "0.0",
"PM-0.5": "1.0",
"Tokens": "1298"
},
"Logic_Puzzle": {
"CR": "56.0",
"S-Acc": "22.8",
"EM": "2.0",
"PM-0.5": "18.0",
"Tokens": "1165"
},
"Sudoku": {
"CR": "69.5",
"S-Acc": "24.2",
"EM": "1.0",
"PM-0.5": "17.5",
"Tokens": "1940"
},
"Drop_Quote": {
"CR": "82.0",
"S-Acc": "27.7",
"EM": "0.0",
"PM-0.5": "12.0",
"Tokens": "1498"
}
}
}