trail_leaderboard / e3c_llm_results / Qwen / Qwen2.5-14B-Instruct-1M_0_GR.json
{
"average_CPS": 27.333585333333332,
"config": {
"model_name": "Qwen/Qwen2.5-14B-Instruct-1M",
"num_fewshot": "0",
"batch_size": 1,
"LANG": "GR",
"model": "Qwen/Qwen2.5-14B-Instruct-1M",
"base_model": "Qwen2ForCausalLM",
"revision": "620fad32de7bdd2293b3d99b39eba2fe63e97438",
"submitted_time": "2025-01-23 13:23:24+00:00",
"num_params_billion": 14.770033664,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 13.389999999999999,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 11.91,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 13.389999999999999,
"stderr": 0.0
}
],
"average_accuracy": 12.896666666666667,
"best_prompt": 13.389999999999999,
"prompt_id": "p1",
"CPS": 13.323942666666666,
"is_dummy": false,
"std_accuracy": 0.8544783984006453
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 37.96,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 42.66,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 38.1,
"stderr": 0.0
}
],
"average_accuracy": 39.57333333333333,
"best_prompt": 42.66,
"prompt_id": "p2",
"CPS": 41.343227999999996,
"is_dummy": false,
"std_accuracy": 2.6740481172434647
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
}
}
}
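The summary fields in this file (average_accuracy, std_accuracy, best_prompt, prompt_id, CPS, average_CPS) are all derivable from the per-prompt f1 values above. The sketch below recomputes them; the CPS formula, best_prompt * (1 - (best_prompt - average_accuracy) / 100), and the use of the sample standard deviation are inferred from the numbers in this file rather than taken from the leaderboard code, and the filename is assumed from the path in the header. Dummy tasks (RML, HIS, DIA) carry null summaries and are excluded from average_CPS, which here is the mean of the NER and RE values.

    # Minimal sketch (assumptions noted above): recompute the derived fields
    # from the per-prompt scores of each non-dummy task.
    import json
    import statistics


    def task_summary(prompts):
        """Summarise a non-dummy task from its per-prompt f1 scores."""
        values = [p["value"] for p in prompts]
        average = sum(values) / len(values)
        best = max(values)
        best_id = prompts[values.index(best)]["prompt"]
        # Assumed consistency-penalised score: the best prompt's score,
        # reduced by its gap (in points) from the average across prompts.
        cps = best * (1 - (best - average) / 100)
        # Sample standard deviation (ddof=1) reproduces std_accuracy.
        std = statistics.stdev(values) if len(values) > 1 else 0.0
        return {
            "average_accuracy": average,
            "best_prompt": best,
            "prompt_id": best_id,
            "CPS": cps,
            "std_accuracy": std,
        }


    with open("Qwen2.5-14B-Instruct-1M_0_GR.json") as fh:
        result = json.load(fh)

    cps_values = []
    for name, task in result["tasks"].items():
        if task.get("is_dummy"):
            continue  # dummy tasks are skipped in the overall average
        summary = task_summary(task["prompts"])
        cps_values.append(summary["CPS"])
        print(name, summary)

    # Mean CPS over non-dummy tasks reproduces average_CPS (~27.3336 here).
    print("average_CPS =", sum(cps_values) / len(cps_values))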