{
"overall": {
"at_0.5": {
"precision_macro": 0.8871660305343512,
"recall_macro": 0.8966275659824048,
"f1_macro": 0.8917858651698956,
"precision_weighted": 0.9334314850372314,
"recall_weighted": 0.9325153374233128,
"f1_weighted": 0.932922632145847,
"accuracy": 0.9325153374233128,
"confusion_matrix": [
[
126,
6
],
[
5,
26
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9618 0.9545 0.9582 132\n recl (1) 0.8125 0.8387 0.8254 31\n\n accuracy 0.9325 163\n macro avg 0.8872 0.8966 0.8918 163\nweighted avg 0.9334 0.9325 0.9329 163\n",
"threshold": 0.5
},
"at_best_global": {
"precision_macro": 0.9187091503267973,
"recall_macro": 0.875733137829912,
"f1_macro": 0.8951363870303655,
"precision_weighted": 0.9371867356349493,
"recall_weighted": 0.9386503067484663,
"f1_weighted": 0.9369926336163481,
"accuracy": 0.9386503067484663,
"confusion_matrix": [
[
129,
3
],
[
7,
24
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9485 0.9773 0.9627 132\n recl (1) 0.8889 0.7742 0.8276 31\n\n accuracy 0.9387 163\n macro avg 0.9187 0.8757 0.8951 163\nweighted avg 0.9372 0.9387 0.9370 163\n",
"threshold": 0.55
},
"at_best_by_lang": {
"precision_macro": 0.9187091503267973,
"recall_macro": 0.875733137829912,
"f1_macro": 0.8951363870303655,
"precision_weighted": 0.9371867356349493,
"recall_weighted": 0.9386503067484663,
"f1_weighted": 0.9369926336163481,
"accuracy": 0.9386503067484663,
"confusion_matrix": [
[
129,
3
],
[
7,
24
]
],
"classification_report": " precision recall f1-score support\n\n no-recl (0) 0.9485 0.9773 0.9627 132\n recl (1) 0.8889 0.7742 0.8276 31\n\n accuracy 0.9387 163\n macro avg 0.9187 0.8757 0.8951 163\nweighted avg 0.9372 0.9387 0.9370 163\n",
"thresholds_by_lang": {
"it": 0.55
}
}
},
"thresholds": {
"global_best": {
"threshold": 0.55,
"f1_macro": 0.8951363870303655,
"precision_macro": 0.9187091503267973,
"recall_macro": 0.875733137829912
},
"by_lang_best": {
"it": {
"threshold": 0.55,
"f1_macro": 0.8951363870303655,
"precision_macro": 0.9187091503267973,
"recall_macro": 0.875733137829912
}
},
"default": 0.5
},
"per_lang": {
"at_0.5": [
{
"lang": "it",
"n": 163,
"accuracy": 0.9325153374233128,
"f1_macro": 0.8917858651698956,
"precision_macro": 0.8871660305343512,
"recall_macro": 0.8966275659824048,
"f1_weighted": 0.932922632145847,
"precision_weighted": 0.9334314850372314,
"recall_weighted": 0.9325153374233128
}
],
"at_best_global": [
{
"lang": "it",
"n": 163,
"accuracy": 0.9386503067484663,
"f1_macro": 0.8951363870303655,
"precision_macro": 0.9187091503267973,
"recall_macro": 0.875733137829912,
"f1_weighted": 0.9369926336163481,
"precision_weighted": 0.9371867356349493,
"recall_weighted": 0.9386503067484663
}
],
"at_best_by_lang": [
{
"lang": "it",
"n": 163,
"accuracy": 0.9386503067484663,
"f1_macro": 0.8951363870303655,
"precision_macro": 0.9187091503267973,
"recall_macro": 0.875733137829912,
"f1_weighted": 0.9369926336163481,
"precision_weighted": 0.9371867356349493,
"recall_weighted": 0.9386503067484663
}
]
}
}
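
For reference, a minimal Python sketch (not part of the stored report) that recomputes the "at_0.5" metrics from the confusion matrix above. It assumes the report is saved locally as reports.json and that scikit-learn is installed; the label names mirror those in the stored classification_report string.

import json

import numpy as np
from sklearn.metrics import classification_report

with open("reports.json") as f:  # local path is an assumption
    report = json.load(f)

cm = np.array(report["overall"]["at_0.5"]["confusion_matrix"])

# Expand the matrix back into (true, predicted) label pairs;
# rows are true classes, columns are predicted classes.
y_true, y_pred = [], []
for true_label in range(cm.shape[0]):
    for pred_label in range(cm.shape[1]):
        n = int(cm[true_label, pred_label])
        y_true.extend([true_label] * n)
        y_pred.extend([pred_label] * n)

# Should reproduce the at_0.5 "classification_report" string:
# accuracy 0.9325, macro F1 0.8918, weighted F1 0.9329 over 163 samples.
print(classification_report(
    y_true, y_pred,
    target_names=["no-recl (0)", "recl (1)"],
    digits=4,
))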