{
"average_CPS": 43.13491793333334,
"config": {
"model_name": "Qwen/Qwen2.5-14B-Instruct-1M",
"num_fewshot": "10",
"batch_size": 1,
"LANG": "EN",
"model": "Qwen/Qwen2.5-14B-Instruct-1M",
"base_model": "Qwen2ForCausalLM",
"revision": "620fad32de7bdd2293b3d99b39eba2fe63e97438",
"submitted_time": "2025-01-23 13:23:24+00:00",
"num_params_billion": 14.770033664,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 60.91,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 56.46,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 62.43,
"stderr": 0.0
}
],
"average_accuracy": 59.93333333333334,
"best_prompt": 62.43,
"prompt_id": "p3",
"CPS": 60.871331000000005,
"is_dummy": false,
"std_accuracy": 3.1025204807274562
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 63.32,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 60.25,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 61.33,
"stderr": 0.0
}
],
"average_accuracy": 61.633333333333326,
"best_prompt": 63.32,
"prompt_id": "p1",
"CPS": 62.25200266666666,
"is_dummy": false,
"std_accuracy": 1.5573160672558843
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 21.29,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 32.22,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 31.78,
"stderr": 0.0
}
],
"average_accuracy": 28.429999999999996,
"best_prompt": 32.22,
"prompt_id": "p2",
"CPS": 30.998862,
"is_dummy": false,
"std_accuracy": 6.1873338361526935
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 30.73,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 11.37,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 7.64,
"stderr": 0.0
}
],
"average_accuracy": 16.580000000000002,
"best_prompt": 30.73,
"prompt_id": "p1",
"CPS": 26.381705,
"is_dummy": false,
"std_accuracy": 12.395366069624568
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 12.44,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 44.29,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 14.37,
"stderr": 0.0
}
],
"average_accuracy": 23.7,
"best_prompt": 44.29,
"prompt_id": "p2",
"CPS": 35.170689,
"is_dummy": false,
"std_accuracy": 17.857555823796268
}
}
}