{
"average_CPS": 43.527679133333336,
"config": {
"model_name": "Qwen/Qwen2.5-14B-Instruct-1M",
"num_fewshot": "10",
"batch_size": 1,
"LANG": "IT",
"model": "Qwen/Qwen2.5-14B-Instruct-1M",
"base_model": "Qwen2ForCausalLM",
"revision": "620fad32de7bdd2293b3d99b39eba2fe63e97438",
"submitted_time": "2025-01-23 13:23:24+00:00",
"num_params_billion": 14.770033664,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 67.19000000000001,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 63.27,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 66.61,
"stderr": 0.0
}
],
"average_accuracy": 65.69,
"best_prompt": 67.19000000000001,
"prompt_id": "p1",
"CPS": 66.18215000000001,
"is_dummy": false,
"std_accuracy": 2.1157504578754107
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 57.67,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 59.98,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 60.92999999999999,
"stderr": 0.0
}
],
"average_accuracy": 59.526666666666664,
"best_prompt": 60.92999999999999,
"prompt_id": "p3",
"CPS": 60.07494899999999,
"is_dummy": false,
"std_accuracy": 1.6766136505865978
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 11.110000000000001,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 15.989999999999998,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 19.6,
"stderr": 0.0
}
],
"average_accuracy": 15.566666666666668,
"best_prompt": 19.6,
"prompt_id": "p3",
"CPS": 18.80946666666667,
"is_dummy": false,
"std_accuracy": 4.260801958943097
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 44.07,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 13.28,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 17.53,
"stderr": 0.0
}
],
"average_accuracy": 24.959999999999997,
"best_prompt": 44.07,
"prompt_id": "p1",
"CPS": 35.648223,
"is_dummy": false,
"std_accuracy": 16.68561356378602
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 8.17,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 51.03,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 10.96,
"stderr": 0.0
}
],
"average_accuracy": 23.386666666666667,
"best_prompt": 51.03,
"prompt_id": "p2",
"CPS": 36.923607000000004,
"is_dummy": false,
"std_accuracy": 23.980438555900797
}
}
}