{
"average_CPS": 15.684724516666668,
"config": {
"model_name": "Qwen/Qwen2.5-32B-Instruct",
"num_fewshot": "0",
"batch_size": 1,
"LANG": "IT",
"model": "Qwen/Qwen2.5-32B-Instruct",
"base_model": "Qwen2ForCausalLM",
"revision": "5ede1c97bbab6ce5cda5812749b4c0bdf79b18dd",
"submitted_time": "2024-09-17 04:17:55+00:00",
"num_params_billion": 32.763876352,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 37.580000000000005,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 16.470000000000002,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 27.96,
"stderr": 0.0
}
],
"average_accuracy": 27.336666666666673,
"best_prompt": 37.580000000000005,
"prompt_id": "p1",
"CPS": 33.730555333333335,
"is_dummy": false,
"std_accuracy": 10.568795264046578
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 45.050000000000004,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 41.589999999999996,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 44.47,
"stderr": 0.0
}
],
"average_accuracy": 43.70333333333334,
"best_prompt": 45.050000000000004,
"prompt_id": "p1",
"CPS": 44.44332666666667,
"is_dummy": false,
"std_accuracy": 1.8530335488957954
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.16999999999999998,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": 0.042499999999999996,
"best_prompt": 0.16999999999999998,
"prompt_id": "p2",
"CPS": 0.16978324999999997,
"is_dummy": false,
"std_accuracy": 0.08499999999999999
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": 0.0,
"best_prompt": 0.0,
"prompt_id": "p1",
"CPS": 0.0,
"is_dummy": false,
"std_accuracy": 0.0
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.08,
"stderr": 0.0
}
],
"average_accuracy": 0.02666666666666667,
"best_prompt": 0.08,
"prompt_id": "p3",
"CPS": 0.07995733333333332,
"is_dummy": false,
"std_accuracy": 0.046188021535170064
}
}
}