trail_leaderboard/e3c_llm_results/Qwen/Qwen2.5-32B-Instruct_0_GR.json
{
"average_CPS": 46.818379166666666,
"config": {
"model_name": "Qwen/Qwen2.5-32B-Instruct",
"num_fewshot": "0",
"batch_size": 1,
"LANG": "GR",
"model": "Qwen/Qwen2.5-32B-Instruct",
"base_model": "Qwen2ForCausalLM",
"revision": "5ede1c97bbab6ce5cda5812749b4c0bdf79b18dd",
"submitted_time": "2024-09-17 04:17:55+00:00",
"num_params_billion": 32.763876352,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 59.760000000000005,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 15.68,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 59.760000000000005,
"stderr": 0.0
}
],
"average_accuracy": 45.06666666666666,
"best_prompt": 59.760000000000005,
"prompt_id": "p1",
"CPS": 50.979264,
"is_dummy": false,
"std_accuracy": 25.449599865878707
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 43.93,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 40.83,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 38.34,
"stderr": 0.0
}
],
"average_accuracy": 41.03333333333333,
"best_prompt": 43.93,
"prompt_id": "p1",
"CPS": 42.65749433333333,
"is_dummy": false,
"std_accuracy": 2.800541614283445
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
}
}
}
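
How the aggregate fields relate to the per-prompt scores, as a minimal Python sketch. The CPS formula is an assumption inferred from the numbers stored in this file (it reproduces the NER, RE, and average_CPS values shown above), not taken from the leaderboard's source code; the function name task_aggregates and the local filename passed to open() are hypothetical.

# Recompute the aggregate fields of this results file from the per-prompt f1 values.
import json
from statistics import mean, stdev

def task_aggregates(prompt_values):
    """Derive average_accuracy, std_accuracy, best_prompt, and CPS for one task."""
    avg = mean(prompt_values)                                       # "average_accuracy"
    std = stdev(prompt_values) if len(prompt_values) > 1 else 0.0   # "std_accuracy" (sample std)
    best = max(prompt_values)                                       # "best_prompt"
    # Assumed combined-performance score: the best prompt's score, penalised by
    # the gap between the best prompt and the average over prompts.
    cps = best * (1 - (best - avg) / 100)                           # "CPS"
    return avg, std, best, cps

with open("Qwen2.5-32B-Instruct_0_GR.json") as fh:                  # hypothetical local copy of this file
    results = json.load(fh)

cps_values = []
for name, task in results["tasks"].items():
    if task.get("is_dummy"):                                        # RML, HIS, DIA carry placeholder zeros
        continue
    values = [p["value"] for p in task["prompts"]]
    avg, std, best, cps = task_aggregates(values)
    print(f"{name}: avg={avg:.2f} std={std:.2f} best={best:.2f} CPS={cps:.4f}")
    cps_values.append(cps)

# "average_CPS" is the mean CPS over the non-dummy tasks only.
print("average_CPS =", mean(cps_values))                            # ~46.818 for this file

Running this against the file reproduces NER CPS 50.979264, RE CPS 42.65749433333333, and average_CPS 46.818379166666666, with the dummy tasks (RML, HIS, DIA) excluded from the average.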