trail_leaderboard / e3c_llm_results / Qwen / Qwen2.5-32B-Instruct_10_IT.json
{
"average_CPS": 32.75043995,
"config": {
"model_name": "Qwen/Qwen2.5-32B-Instruct",
"num_fewshot": "10",
"batch_size": 1,
"LANG": "IT",
"model": "Qwen/Qwen2.5-32B-Instruct",
"base_model": "Qwen2ForCausalLM",
"revision": "5ede1c97bbab6ce5cda5812749b4c0bdf79b18dd",
"submitted_time": "2024-09-17 04:17:55+00:00",
"num_params_billion": 32.763876352,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 69.34,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 71.52,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 69.3,
"stderr": 0.0
}
],
"average_accuracy": 70.05333333333334,
"best_prompt": 71.52,
"prompt_id": "p2",
"CPS": 70.47104,
"is_dummy": false,
"std_accuracy": 1.2703280416228429
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 58.01,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 55.95,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 55.26,
"stderr": 0.0
}
],
"average_accuracy": 56.406666666666666,
"best_prompt": 58.01,
"prompt_id": "p1",
"CPS": 57.079906333333334,
"is_dummy": false,
"std_accuracy": 1.4307457263026617
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 3.9800000000000004,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 5.99,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 10.25,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 10.25,
"stderr": 0.0
}
],
"average_accuracy": 7.6175,
"best_prompt": 10.25,
"prompt_id": "p3",
"CPS": 9.980168749999999,
"is_dummy": false,
"std_accuracy": 3.1485591942982425
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 23.22,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 1.09,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 8.28,
"stderr": 0.0
}
],
"average_accuracy": 10.863333333333332,
"best_prompt": 23.22,
"prompt_id": "p1",
"CPS": 20.350782,
"is_dummy": false,
"std_accuracy": 11.288907534980225
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 1.8599999999999999,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 6.02,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 2.7199999999999998,
"stderr": 0.0
}
],
"average_accuracy": 3.5333333333333328,
"best_prompt": 6.02,
"prompt_id": "p2",
"CPS": 5.8703026666666664,
"is_dummy": false,
"std_accuracy": 2.196026715077331
}
}
}
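
The "config" block pins an exact checkpoint: a model name plus a commit revision. As a hypothetical illustration (not the evaluation harness's own loading code), the same checkpoint could be re-fetched with the Hugging Face transformers library:

from transformers import AutoModelForCausalLM, AutoTokenizer

# model_id and revision are copied from the "config" block above;
# everything else in this snippet is illustrative.
model_id = "Qwen/Qwen2.5-32B-Instruct"
revision = "5ede1c97bbab6ce5cda5812749b4c0bdf79b18dd"  # pinned commit

tokenizer = AutoTokenizer.from_pretrained(model_id, revision=revision)
model = AutoModelForCausalLM.from_pretrained(model_id, revision=revision)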
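Each task's aggregate fields are consistent with a simple rule inferred from the recorded numbers themselves: average_accuracy is the mean of the per-prompt scores, std_accuracy their sample standard deviation (n-1 denominator), best_prompt the maximum, and CPS = best_prompt * (1 - (best_prompt - average_accuracy) / 100), i.e. the best score discounted by its gap to the mean; the top-level average_CPS is the plain mean of the five task CPS values. The sketch below (filename assumed from the path above) re-derives these fields and checks them against the file. Note that the RML block lists prompt p3 twice and its recorded aggregates (average 7.6175 over four entries) include the duplicate, so the sketch keeps it.

import json
import statistics

# Minimal verification sketch; the aggregation rule is inferred from the
# recorded values in this file, not taken from the leaderboard's own code.
with open("Qwen2.5-32B-Instruct_10_IT.json") as f:  # filename assumed
    result = json.load(f)

cps_per_task = []
for name, task in result["tasks"].items():
    # RML lists p3 twice; the recorded aggregates include the duplicate,
    # so it is deliberately kept here as well.
    values = [p["value"] for p in task["prompts"]]
    avg = sum(values) / len(values)
    best = max(values)
    std = statistics.stdev(values)  # sample std (n-1), matches std_accuracy
    cps = best * (1 - (best - avg) / 100)  # best score, discounted by spread
    cps_per_task.append(cps)
    for got, key in [(avg, "average_accuracy"), (best, "best_prompt"),
                     (std, "std_accuracy"), (cps, "CPS")]:
        assert abs(got - task[key]) < 1e-6, (name, key)

# Headline score: plain mean of the per-task CPS values.
assert abs(sum(cps_per_task) / len(cps_per_task) - result["average_CPS"]) < 1e-6
print("all aggregate fields reproduced")

Read this way, CPS rewards a high best-prompt score while penalizing sensitivity to prompt wording: DIA's best prompt reaches 23.22, but the wide spread across its three prompts (std_accuracy about 11.29) pulls its CPS down to 20.35, whereas NER's tightly clustered prompts keep its CPS close to its best score.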