Skip to content

Commit

Permalink
add German results for German models (#96)
Browse files Browse the repository at this point in the history
  • Loading branch information
sam-hey authored Jan 22, 2025
1 parent 7573d55 commit f0b35fd
Show file tree
Hide file tree
Showing 26 changed files with 2,694 additions and 78 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
{
"dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
"task_name": "AmazonReviewsClassification",
"mteb_version": "1.29.10",
"scores": {
"validation": [
{
"accuracy": 0.4614,
"f1": 0.435978,
"f1_weighted": 0.435978,
"scores_per_experiment": [
{
"accuracy": 0.4848,
"f1": 0.46934,
"f1_weighted": 0.46934
},
{
"accuracy": 0.4628,
"f1": 0.43203,
"f1_weighted": 0.43203
},
{
"accuracy": 0.4528,
"f1": 0.421687,
"f1_weighted": 0.421687
},
{
"accuracy": 0.4986,
"f1": 0.467483,
"f1_weighted": 0.467483
},
{
"accuracy": 0.471,
"f1": 0.443669,
"f1_weighted": 0.443669
},
{
"accuracy": 0.464,
"f1": 0.432224,
"f1_weighted": 0.432224
},
{
"accuracy": 0.4034,
"f1": 0.396426,
"f1_weighted": 0.396426
},
{
"accuracy": 0.4938,
"f1": 0.466621,
"f1_weighted": 0.466621
},
{
"accuracy": 0.4492,
"f1": 0.42017,
"f1_weighted": 0.42017
},
{
"accuracy": 0.4336,
"f1": 0.410134,
"f1_weighted": 0.410134
}
],
"main_score": 0.4614,
"hf_subset": "de",
"languages": [
"deu-Latn"
]
}
],
"test": [
{
"accuracy": 0.46368,
"f1": 0.439507,
"f1_weighted": 0.439507,
"scores_per_experiment": [
{
"accuracy": 0.4806,
"f1": 0.463278,
"f1_weighted": 0.463278
},
{
"accuracy": 0.4642,
"f1": 0.433653,
"f1_weighted": 0.433653
},
{
"accuracy": 0.4534,
"f1": 0.423666,
"f1_weighted": 0.423666
},
{
"accuracy": 0.4966,
"f1": 0.468083,
"f1_weighted": 0.468083
},
{
"accuracy": 0.4776,
"f1": 0.450634,
"f1_weighted": 0.450634
},
{
"accuracy": 0.4768,
"f1": 0.450572,
"f1_weighted": 0.450572
},
{
"accuracy": 0.4174,
"f1": 0.409543,
"f1_weighted": 0.409543
},
{
"accuracy": 0.4944,
"f1": 0.46879,
"f1_weighted": 0.46879
},
{
"accuracy": 0.4498,
"f1": 0.419052,
"f1_weighted": 0.419052
},
{
"accuracy": 0.426,
"f1": 0.407799,
"f1_weighted": 0.407799
}
],
"main_score": 0.46368,
"hf_subset": "de",
"languages": [
"deu-Latn"
]
}
]
},
"evaluation_time": 126.27368593215942,
"kg_co2_emissions": null
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
{
"dataset_revision": "a2dd5b02a77de3466a3eaa98ae586b5610314496",
"task_name": "BlurbsClusteringP2P",
"mteb_version": "1.29.10",
"scores": {
"test": [
{
"v_measure": 0.386876,
"v_measure_std": 0.083317,
"v_measures": [
0.336807,
0.321236,
0.312527,
0.321698,
0.315842,
0.318106,
0.344385,
0.303119,
0.327119,
0.324908,
0.366773,
0.496408,
0.334427,
0.286325,
0.264392,
0.364911,
0.24358,
0.495096,
0.479464,
0.479456,
0.475731,
0.472335,
0.475948,
0.461984,
0.480143,
0.478474,
0.475722,
0.475621
],
"main_score": 0.386876,
"hf_subset": "default",
"languages": [
"deu-Latn"
]
}
]
},
"evaluation_time": 4207.973029613495,
"kg_co2_emissions": null
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
{
"dataset_revision": "22793b6a6465bf00120ad525e38c51210858132c",
"task_name": "BlurbsClusteringS2S",
"mteb_version": "1.29.10",
"scores": {
"test": [
{
"v_measure": 0.162811,
"v_measure_std": 0.082541,
"v_measures": [
0.091455,
0.077184,
0.106489,
0.087398,
0.07582,
0.100122,
0.088196,
0.077762,
0.100258,
0.094399,
0.124095,
0.246679,
0.051443,
0.085061,
0.094462,
0.223444,
0.100991,
0.322938,
0.217459,
0.208868,
0.206221,
0.214174,
0.271484,
0.218194,
0.230808,
0.297791,
0.284434,
0.261075
],
"main_score": 0.162811,
"hf_subset": "default",
"languages": [
"deu-Latn"
]
}
]
},
"evaluation_time": 278.6751754283905,
"kg_co2_emissions": null
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
{
"dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf",
"task_name": "MTOPDomainClassification",
"mteb_version": "1.29.10",
"scores": {
"validation": [
{
"accuracy": 0.904848,
"f1": 0.896445,
"f1_weighted": 0.904566,
"scores_per_experiment": [
{
"accuracy": 0.888705,
"f1": 0.883251,
"f1_weighted": 0.888924
},
{
"accuracy": 0.905234,
"f1": 0.894371,
"f1_weighted": 0.90487
},
{
"accuracy": 0.907989,
"f1": 0.899887,
"f1_weighted": 0.907398
},
{
"accuracy": 0.912397,
"f1": 0.904507,
"f1_weighted": 0.911696
},
{
"accuracy": 0.905785,
"f1": 0.895272,
"f1_weighted": 0.904943
},
{
"accuracy": 0.906887,
"f1": 0.895359,
"f1_weighted": 0.90575
},
{
"accuracy": 0.905785,
"f1": 0.897953,
"f1_weighted": 0.905034
},
{
"accuracy": 0.898072,
"f1": 0.894061,
"f1_weighted": 0.899223
},
{
"accuracy": 0.902479,
"f1": 0.894367,
"f1_weighted": 0.902354
},
{
"accuracy": 0.915152,
"f1": 0.905425,
"f1_weighted": 0.915466
}
],
"main_score": 0.904848,
"hf_subset": "de",
"languages": [
"deu-Latn"
]
}
],
"test": [
{
"accuracy": 0.912229,
"f1": 0.902667,
"f1_weighted": 0.911946,
"scores_per_experiment": [
{
"accuracy": 0.899408,
"f1": 0.892616,
"f1_weighted": 0.899651
},
{
"accuracy": 0.911524,
"f1": 0.901484,
"f1_weighted": 0.910879
},
{
"accuracy": 0.915187,
"f1": 0.904778,
"f1_weighted": 0.914295
},
{
"accuracy": 0.912651,
"f1": 0.90301,
"f1_weighted": 0.911949
},
{
"accuracy": 0.917723,
"f1": 0.905485,
"f1_weighted": 0.91666
},
{
"accuracy": 0.912933,
"f1": 0.900559,
"f1_weighted": 0.912304
},
{
"accuracy": 0.918287,
"f1": 0.907105,
"f1_weighted": 0.917834
},
{
"accuracy": 0.897999,
"f1": 0.891261,
"f1_weighted": 0.899309
},
{
"accuracy": 0.913215,
"f1": 0.90552,
"f1_weighted": 0.912925
},
{
"accuracy": 0.923359,
"f1": 0.914857,
"f1_weighted": 0.923654
}
],
"main_score": 0.912229,
"hf_subset": "de",
"languages": [
"deu-Latn"
]
}
]
},
"evaluation_time": 17.621785640716553,
"kg_co2_emissions": null
}
Loading

0 comments on commit f0b35fd

Please sign in to comment.