{
"average_CPS": 4.3259333333333325,
"config": {
"model_name": "HiTZ/Medical-mT5-large",
"num_fewshot": "0",
"batch_size": 1,
"LANG": "SK",
"model": "HiTZ/Medical-mT5-large",
"base_model": "MT5ForConditionalGeneration",
"revision": "e8ae7101f0ab1ed5b8add8846e44a2d39f6e2c47",
"submitted_time": "2023-10-31 15:15:15+00:00",
"num_params_billion": null,
"language": "en_es_fr_it"
},
"tasks": {
"NER": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 8.799999999999999,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 3.75,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 8.799999999999999,
"stderr": 0.0
}
],
"average_accuracy": 7.116666666666666,
"best_prompt": 8.799999999999999,
"prompt_id": "p1",
"CPS": 8.651866666666665,
"is_dummy": false,
"std_accuracy": 2.9156188594076093
},
"RE": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p2",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
},
{
"prompt": "p3",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": 0.0,
"best_prompt": 0.0,
"prompt_id": "p1",
"CPS": 0.0,
"is_dummy": false,
"std_accuracy": 0.0
},
"RML": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
},
"HIS": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
},
"DIA": {
"prompts": [
{
"prompt": "p1",
"metric": "f1",
"value": 0.0,
"stderr": 0.0
}
],
"average_accuracy": null,
"std_accuracy": null,
"best_prompt": null,
"prompt_id": null,
"CPS": null,
"is_dummy": true
}
}
}
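A minimal sketch of how the derived fields in this file appear to be computed, inferred purely from the numbers above rather than from any official TRAIL leaderboard code, so treat the formulas as assumptions: per-task `average_accuracy` and `std_accuracy` are the mean and sample standard deviation of the prompt-level f1 values, `best_prompt` is their maximum, `CPS` looks like `best_prompt * (1 - (best_prompt - average_accuracy) / 100)`, and `average_CPS` is the mean of `CPS` over non-dummy tasks. The local filename is assumed from the repo path.

```python
"""Recompute the derived fields of Medical-mT5-large_0_SK.json.

Assumption: the aggregation rules below are reverse-engineered from the
stored values in this file, not taken from official leaderboard code.
"""
import json
import statistics

# Hypothetical local path; adjust to wherever the file is saved.
with open("Medical-mT5-large_0_SK.json") as fh:
    result = json.load(fh)

cps_values = []
for name, task in result["tasks"].items():
    if task["is_dummy"]:
        continue  # dummy tasks (RML, HIS, DIA here) have null aggregates and are skipped
    scores = [p["value"] for p in task["prompts"]]
    avg = sum(scores) / len(scores)        # average_accuracy
    std = statistics.stdev(scores)         # std_accuracy (sample std, ddof=1)
    best = max(scores)                     # best_prompt
    # Assumed CPS formula: penalize best_prompt by its gap to the prompt average.
    cps = best * (1 - (best - avg) / 100)
    cps_values.append(cps)
    print(f"{name}: avg={avg:.4f} std={std:.4f} best={best:.2f} CPS={cps:.4f}")

# Mean CPS over the non-dummy tasks (NER and RE here).
print("average_CPS =", sum(cps_values) / len(cps_values))
```

Under these assumed rules the script reproduces the stored values exactly: NER gives CPS ≈ 8.6519, RE gives 0.0, and their mean is 4.32593..., matching `average_CPS`; the three `is_dummy` tasks explain why their aggregate fields are null.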