trail_leaderboard / e3c_llm_results / Qwen / Qwen2.5-32B-Instruct_0_EN.json
{
"average_CPS": 16.729891099999996,
"config": {
"model_name": "Qwen/Qwen2.5-32B-Instruct",
"num_fewshot": "0",
"batch_size": 1,
"LANG": "EN",
"model": "Qwen/Qwen2.5-32B-Instruct",
"base_model": "Qwen2ForCausalLM",
"revision": "5ede1c97bbab6ce5cda5812749b4c0bdf79b18dd",
"submitted_time": "2024-09-17 04:17:55+00:00",
"num_params_billion": 32.763876352,
"language": "en"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 38.04,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 30.680000000000003,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 29.64,
"stderr": 0.0
}
],
"average_accuracy": 32.78666666666667,
"best_prompt": 38.04,
"prompt_id": "p1",
"CPS": 36.041632,
"is_dummy": false,
"std_accuracy": 4.5791411130618505
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 47.339999999999996,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 46.489999999999995,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 45.910000000000004,
"stderr": 0.0
}
],
"average_accuracy": 46.57999999999999,
"best_prompt": 47.339999999999996,
"prompt_id": "p1",
"CPS": 46.98021599999999,
"is_dummy": false,
"std_accuracy": 0.7192357054540571
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.05,
"stderr": 0.0
},
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.5700000000000001,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": 0.15500000000000003,
"best_prompt": 0.5700000000000001,
"prompt_id": "p2",
"CPS": 0.5676345,
"is_dummy": false,
"std_accuracy": 0.27766886753829645
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.06,
"stderr": 0.0
}
],
"average_accuracy": 0.015,
"best_prompt": 0.06,
"prompt_id": "p3",
"CPS": 0.059973,
"is_dummy": false,
"std_accuracy": 0.03
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": 0.0,
"best_prompt": 0.0,
"prompt_id": "p1",
"CPS": 0.0,
"is_dummy": false,
"std_accuracy": 0.0
}
}
}
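
The derived fields above (average_accuracy, best_prompt, std_accuracy, the per-task CPS, and the top-level average_CPS) are numerically consistent with the per-prompt F1 values. The sketch below is a rough consistency check, not the leaderboard's reference implementation: it assumes CPS is the best prompt's score discounted by the gap between the best and the average prompt accuracy, i.e. CPS = best * (1 - (best - avg) / 100), that std_accuracy is the sample standard deviation across the prompt rows, and that average_CPS is the unweighted mean of the five task-level CPS values. The local file name passed to open() is taken from the path above and is an assumption about where the file lives.

import json
import statistics

def cps(values):
    # Assumed formula, inferred from the stored numbers: discount the best
    # prompt's score by the best-vs-average accuracy gap (in points).
    best = max(values)
    avg = sum(values) / len(values)
    return best * (1 - (best - avg) / 100)

# Path is an assumption; adjust to wherever this JSON file is stored locally.
with open("Qwen2.5-32B-Instruct_0_EN.json") as f:
    results = json.load(f)

task_cps = {}
for name, task in results["tasks"].items():
    scores = [p["value"] for p in task["prompts"]]
    task_cps[name] = cps(scores)
    # Each stored field should match the recomputed value up to rounding error.
    assert abs(max(scores) - task["best_prompt"]) < 1e-6
    assert abs(sum(scores) / len(scores) - task["average_accuracy"]) < 1e-6
    assert abs(statistics.stdev(scores) - task["std_accuracy"]) < 1e-6
    assert abs(task_cps[name] - task["CPS"]) < 1e-6

# Top-level score: unweighted mean of the five task-level CPS values.
assert abs(sum(task_cps.values()) / len(task_cps) - results["average_CPS"]) < 1e-6
print({name: round(value, 4) for name, value in task_cps.items()})

Note that the RML and DIA tasks each list "p1" twice; the stored average_accuracy and std_accuracy are computed over all four rows, duplicate included, so the check above deliberately treats the rows the same way.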