From f629bfcd9d2e58e95fd06140e2238e4a85c2f9be Mon Sep 17 00:00:00 2001 From: sam-hey <40773225+sam-hey@users.noreply.github.com> Date: Wed, 22 Jan 2025 06:54:43 +0000 Subject: [PATCH] add german results for ger models --- .../AmazonReviewsClassification.json | 137 +++++++++ .../BlurbsClusteringP2P.json | 50 ++++ .../BlurbsClusteringS2S.json | 50 ++++ .../MTOPDomainClassification.json | 137 +++++++++ .../MTOPIntentClassification.json | 137 +++++++++ .../MassiveScenarioClassification.json | 268 +++++++++++++----- .../model_meta.json | 2 +- .../1/model_meta.json | 2 +- .../1/model_meta.json | 2 +- .../GermanDPR.json | 158 +++++++++++ .../model_meta.json | 2 +- .../AmazonCounterfactualClassification.json | 181 ++++++++++++ .../AmazonReviewsClassification.json | 137 +++++++++ .../FalseFriendsGermanEnglish.json | 58 ++++ .../GermanDPR.json | 158 +++++++++++ .../GermanQuAD-Retrieval.json | 158 +++++++++++ .../MTOPDomainClassification.json | 137 +++++++++ .../MTOPIntentClassification.json | 137 +++++++++ .../PawsXPairClassification.json | 107 +++++++ .../XMarket.json | 158 +++++++++++ .../model_meta.json | 2 +- .../AmazonCounterfactualClassification.json | 181 ++++++++++++ .../AmazonReviewsClassification.json | 137 +++++++++ .../MTOPDomainClassification.json | 137 +++++++++ .../MTOPIntentClassification.json | 137 +++++++++ .../model_meta.json | 2 +- 26 files changed, 2694 insertions(+), 78 deletions(-) create mode 100644 results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/AmazonReviewsClassification.json create mode 100644 results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringP2P.json create mode 100644 results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringS2S.json create mode 100644 results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPDomainClassification.json create mode 100644 results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPIntentClassification.json create mode 100644 
results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/GermanDPR.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonCounterfactualClassification.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonReviewsClassification.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/FalseFriendsGermanEnglish.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanDPR.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanQuAD-Retrieval.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPDomainClassification.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPIntentClassification.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/PawsXPairClassification.json create mode 100644 results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/XMarket.json create mode 100644 results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonCounterfactualClassification.json create mode 100644 results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonReviewsClassification.json create mode 100644 results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPDomainClassification.json create mode 100644 results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPIntentClassification.json diff --git a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/AmazonReviewsClassification.json 
b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/AmazonReviewsClassification.json new file mode 100644 index 0000000000..955ca87236 --- /dev/null +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/AmazonReviewsClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d", + "task_name": "AmazonReviewsClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.4614, + "f1": 0.435978, + "f1_weighted": 0.435978, + "scores_per_experiment": [ + { + "accuracy": 0.4848, + "f1": 0.46934, + "f1_weighted": 0.46934 + }, + { + "accuracy": 0.4628, + "f1": 0.43203, + "f1_weighted": 0.43203 + }, + { + "accuracy": 0.4528, + "f1": 0.421687, + "f1_weighted": 0.421687 + }, + { + "accuracy": 0.4986, + "f1": 0.467483, + "f1_weighted": 0.467483 + }, + { + "accuracy": 0.471, + "f1": 0.443669, + "f1_weighted": 0.443669 + }, + { + "accuracy": 0.464, + "f1": 0.432224, + "f1_weighted": 0.432224 + }, + { + "accuracy": 0.4034, + "f1": 0.396426, + "f1_weighted": 0.396426 + }, + { + "accuracy": 0.4938, + "f1": 0.466621, + "f1_weighted": 0.466621 + }, + { + "accuracy": 0.4492, + "f1": 0.42017, + "f1_weighted": 0.42017 + }, + { + "accuracy": 0.4336, + "f1": 0.410134, + "f1_weighted": 0.410134 + } + ], + "main_score": 0.4614, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.46368, + "f1": 0.439507, + "f1_weighted": 0.439507, + "scores_per_experiment": [ + { + "accuracy": 0.4806, + "f1": 0.463278, + "f1_weighted": 0.463278 + }, + { + "accuracy": 0.4642, + "f1": 0.433653, + "f1_weighted": 0.433653 + }, + { + "accuracy": 0.4534, + "f1": 0.423666, + "f1_weighted": 0.423666 + }, + { + "accuracy": 0.4966, + "f1": 0.468083, + "f1_weighted": 0.468083 + }, + { + "accuracy": 0.4776, + "f1": 0.450634, + "f1_weighted": 0.450634 + }, + { + "accuracy": 0.4768, + "f1": 0.450572, + "f1_weighted": 0.450572 + }, + { + "accuracy": 0.4174, + "f1": 0.409543, + 
"f1_weighted": 0.409543 + }, + { + "accuracy": 0.4944, + "f1": 0.46879, + "f1_weighted": 0.46879 + }, + { + "accuracy": 0.4498, + "f1": 0.419052, + "f1_weighted": 0.419052 + }, + { + "accuracy": 0.426, + "f1": 0.407799, + "f1_weighted": 0.407799 + } + ], + "main_score": 0.46368, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 126.27368593215942, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringP2P.json b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringP2P.json new file mode 100644 index 0000000000..1614f83b53 --- /dev/null +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringP2P.json @@ -0,0 +1,50 @@ +{ + "dataset_revision": "a2dd5b02a77de3466a3eaa98ae586b5610314496", + "task_name": "BlurbsClusteringP2P", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "v_measure": 0.386876, + "v_measure_std": 0.083317, + "v_measures": [ + 0.336807, + 0.321236, + 0.312527, + 0.321698, + 0.315842, + 0.318106, + 0.344385, + 0.303119, + 0.327119, + 0.324908, + 0.366773, + 0.496408, + 0.334427, + 0.286325, + 0.264392, + 0.364911, + 0.24358, + 0.495096, + 0.479464, + 0.479456, + 0.475731, + 0.472335, + 0.475948, + 0.461984, + 0.480143, + 0.478474, + 0.475722, + 0.475621 + ], + "main_score": 0.386876, + "hf_subset": "default", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 4207.973029613495, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringS2S.json b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringS2S.json new file mode 100644 index 0000000000..b21432eda2 --- /dev/null +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/BlurbsClusteringS2S.json @@ -0,0 +1,50 @@ +{ + "dataset_revision": 
"22793b6a6465bf00120ad525e38c51210858132c", + "task_name": "BlurbsClusteringS2S", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "v_measure": 0.162811, + "v_measure_std": 0.082541, + "v_measures": [ + 0.091455, + 0.077184, + 0.106489, + 0.087398, + 0.07582, + 0.100122, + 0.088196, + 0.077762, + 0.100258, + 0.094399, + 0.124095, + 0.246679, + 0.051443, + 0.085061, + 0.094462, + 0.223444, + 0.100991, + 0.322938, + 0.217459, + 0.208868, + 0.206221, + 0.214174, + 0.271484, + 0.218194, + 0.230808, + 0.297791, + 0.284434, + 0.261075 + ], + "main_score": 0.162811, + "hf_subset": "default", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 278.6751754283905, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPDomainClassification.json b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPDomainClassification.json new file mode 100644 index 0000000000..3c9001a417 --- /dev/null +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPDomainClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf", + "task_name": "MTOPDomainClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.904848, + "f1": 0.896445, + "f1_weighted": 0.904566, + "scores_per_experiment": [ + { + "accuracy": 0.888705, + "f1": 0.883251, + "f1_weighted": 0.888924 + }, + { + "accuracy": 0.905234, + "f1": 0.894371, + "f1_weighted": 0.90487 + }, + { + "accuracy": 0.907989, + "f1": 0.899887, + "f1_weighted": 0.907398 + }, + { + "accuracy": 0.912397, + "f1": 0.904507, + "f1_weighted": 0.911696 + }, + { + "accuracy": 0.905785, + "f1": 0.895272, + "f1_weighted": 0.904943 + }, + { + "accuracy": 0.906887, + "f1": 0.895359, + "f1_weighted": 0.90575 + }, + { + "accuracy": 0.905785, + "f1": 0.897953, + "f1_weighted": 0.905034 + }, + { + "accuracy": 0.898072, + "f1": 0.894061, + 
"f1_weighted": 0.899223 + }, + { + "accuracy": 0.902479, + "f1": 0.894367, + "f1_weighted": 0.902354 + }, + { + "accuracy": 0.915152, + "f1": 0.905425, + "f1_weighted": 0.915466 + } + ], + "main_score": 0.904848, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.912229, + "f1": 0.902667, + "f1_weighted": 0.911946, + "scores_per_experiment": [ + { + "accuracy": 0.899408, + "f1": 0.892616, + "f1_weighted": 0.899651 + }, + { + "accuracy": 0.911524, + "f1": 0.901484, + "f1_weighted": 0.910879 + }, + { + "accuracy": 0.915187, + "f1": 0.904778, + "f1_weighted": 0.914295 + }, + { + "accuracy": 0.912651, + "f1": 0.90301, + "f1_weighted": 0.911949 + }, + { + "accuracy": 0.917723, + "f1": 0.905485, + "f1_weighted": 0.91666 + }, + { + "accuracy": 0.912933, + "f1": 0.900559, + "f1_weighted": 0.912304 + }, + { + "accuracy": 0.918287, + "f1": 0.907105, + "f1_weighted": 0.917834 + }, + { + "accuracy": 0.897999, + "f1": 0.891261, + "f1_weighted": 0.899309 + }, + { + "accuracy": 0.913215, + "f1": 0.90552, + "f1_weighted": 0.912925 + }, + { + "accuracy": 0.923359, + "f1": 0.914857, + "f1_weighted": 0.923654 + } + ], + "main_score": 0.912229, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 17.621785640716553, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPIntentClassification.json b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPIntentClassification.json new file mode 100644 index 0000000000..18eab69ef9 --- /dev/null +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MTOPIntentClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba", + "task_name": "MTOPIntentClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.657741, + "f1": 0.433268, + "f1_weighted": 0.690047, + 
"scores_per_experiment": [ + { + "accuracy": 0.661157, + "f1": 0.432011, + "f1_weighted": 0.696916 + }, + { + "accuracy": 0.678788, + "f1": 0.466991, + "f1_weighted": 0.714941 + }, + { + "accuracy": 0.663361, + "f1": 0.43114, + "f1_weighted": 0.696194 + }, + { + "accuracy": 0.649587, + "f1": 0.432822, + "f1_weighted": 0.674071 + }, + { + "accuracy": 0.662259, + "f1": 0.432395, + "f1_weighted": 0.69183 + }, + { + "accuracy": 0.627548, + "f1": 0.427565, + "f1_weighted": 0.660601 + }, + { + "accuracy": 0.672727, + "f1": 0.431111, + "f1_weighted": 0.700139 + }, + { + "accuracy": 0.660606, + "f1": 0.442187, + "f1_weighted": 0.696946 + }, + { + "accuracy": 0.650689, + "f1": 0.420515, + "f1_weighted": 0.684475 + }, + { + "accuracy": 0.650689, + "f1": 0.415943, + "f1_weighted": 0.684352 + } + ], + "main_score": 0.657741, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.68084, + "f1": 0.461842, + "f1_weighted": 0.711913, + "scores_per_experiment": [ + { + "accuracy": 0.686391, + "f1": 0.470712, + "f1_weighted": 0.717191 + }, + { + "accuracy": 0.694844, + "f1": 0.472763, + "f1_weighted": 0.731372 + }, + { + "accuracy": 0.686954, + "f1": 0.459152, + "f1_weighted": 0.720311 + }, + { + "accuracy": 0.674556, + "f1": 0.468172, + "f1_weighted": 0.698181 + }, + { + "accuracy": 0.684982, + "f1": 0.464202, + "f1_weighted": 0.7154 + }, + { + "accuracy": 0.64328, + "f1": 0.44399, + "f1_weighted": 0.674767 + }, + { + "accuracy": 0.691181, + "f1": 0.460218, + "f1_weighted": 0.717318 + }, + { + "accuracy": 0.681037, + "f1": 0.467666, + "f1_weighted": 0.714447 + }, + { + "accuracy": 0.680473, + "f1": 0.453633, + "f1_weighted": 0.713016 + }, + { + "accuracy": 0.6847, + "f1": 0.457912, + "f1_weighted": 0.717126 + } + ], + "main_score": 0.68084, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 51.36896634101868, + "kg_co2_emissions": null +} \ No newline at end of file diff --git 
a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MassiveScenarioClassification.json b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MassiveScenarioClassification.json index 26ae367dd1..69f54209e4 100644 --- a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MassiveScenarioClassification.json +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/MassiveScenarioClassification.json @@ -1,137 +1,261 @@ { "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8", - "evaluation_time": 13.257129430770874, - "kg_co2_emissions": 0.0024129392078880794, - "mteb_version": "1.14.12", + "task_name": "MassiveScenarioClassification", + "mteb_version": "1.29.10", "scores": { "test": [ { - "accuracy": 0.7342299932750505, - "f1": 0.727081073738889, - "f1_weighted": 0.7291454381904577, + "accuracy": 0.73423, + "f1": 0.727081, + "f1_weighted": 0.729145, "hf_subset": "ru", "languages": [ "rus-Cyrl" ], - "main_score": 0.7342299932750505, + "main_score": 0.73423, "scores_per_experiment": [ { - "accuracy": 0.7370544720914594, - "f1": 0.7320001869140739, - "f1_weighted": 0.7322000402888483 + "accuracy": 0.737054, + "f1": 0.732, + "f1_weighted": 0.7322 }, { - "accuracy": 0.7320107599193006, - "f1": 0.7312433708999367, - "f1_weighted": 0.7266766891375795 + "accuracy": 0.732011, + "f1": 0.731243, + "f1_weighted": 0.726677 }, { - "accuracy": 0.7437794216543376, - "f1": 0.7305428360973807, - "f1_weighted": 0.7393301741691624 + "accuracy": 0.743779, + "f1": 0.730543, + "f1_weighted": 0.73933 }, { - "accuracy": 0.7424344317417619, - "f1": 0.7358147374978421, - "f1_weighted": 0.7392674329786143 + "accuracy": 0.742434, + "f1": 0.735815, + "f1_weighted": 0.739267 }, { - "accuracy": 0.7387357094821789, - "f1": 0.7239180276829599, - "f1_weighted": 0.7295326553567506 + "accuracy": 0.738736, + "f1": 0.723918, + "f1_weighted": 0.729533 }, { - "accuracy": 0.7081371889710827, - "f1": 0.6974759366496573, - "f1_weighted": 0.6999339182674899 + 
"accuracy": 0.708137, + "f1": 0.697476, + "f1_weighted": 0.699934 }, { - "accuracy": 0.7310020174848688, - "f1": 0.7223636587611565, - "f1_weighted": 0.7270063978481861 + "accuracy": 0.731002, + "f1": 0.722364, + "f1_weighted": 0.727006 }, { - "accuracy": 0.7135171486213854, - "f1": 0.7135032404031129, - "f1_weighted": 0.7091690336631582 + "accuracy": 0.713517, + "f1": 0.713503, + "f1_weighted": 0.709169 }, { - "accuracy": 0.7508406186953598, - "f1": 0.7452560010429398, - "f1_weighted": 0.7487309951111808 + "accuracy": 0.750841, + "f1": 0.745256, + "f1_weighted": 0.748731 }, { - "accuracy": 0.7447881640887694, - "f1": 0.7386927414398298, - "f1_weighted": 0.7396070450836079 + "accuracy": 0.744788, + "f1": 0.738693, + "f1_weighted": 0.739607 } ] + }, + { + "accuracy": 0.73544, + "f1": 0.727401, + "f1_weighted": 0.728997, + "scores_per_experiment": [ + { + "accuracy": 0.742098, + "f1": 0.733146, + "f1_weighted": 0.737209 + }, + { + "accuracy": 0.747478, + "f1": 0.740965, + "f1_weighted": 0.741591 + }, + { + "accuracy": 0.735037, + "f1": 0.727365, + "f1_weighted": 0.728237 + }, + { + "accuracy": 0.731338, + "f1": 0.72088, + "f1_weighted": 0.727566 + }, + { + "accuracy": 0.732683, + "f1": 0.726029, + "f1_weighted": 0.722856 + }, + { + "accuracy": 0.70881, + "f1": 0.69902, + "f1_weighted": 0.69472 + }, + { + "accuracy": 0.736718, + "f1": 0.724222, + "f1_weighted": 0.728796 + }, + { + "accuracy": 0.725286, + "f1": 0.724655, + "f1_weighted": 0.724493 + }, + { + "accuracy": 0.750841, + "f1": 0.744536, + "f1_weighted": 0.747324 + }, + { + "accuracy": 0.744116, + "f1": 0.733189, + "f1_weighted": 0.737177 + } + ], + "main_score": 0.73544, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] } ], "validation": [ { - "accuracy": 0.7334481062469258, - "f1": 0.7219642986686683, - "f1_weighted": 0.7291686660772714, + "accuracy": 0.733448, + "f1": 0.721964, + "f1_weighted": 0.729169, "hf_subset": "ru", "languages": [ "rus-Cyrl" ], - "main_score": 0.7334481062469258, + 
"main_score": 0.733448, "scores_per_experiment": [ { - "accuracy": 0.750614854894245, - "f1": 0.7419129503258692, - "f1_weighted": 0.7482714805119889 + "accuracy": 0.750615, + "f1": 0.741913, + "f1_weighted": 0.748271 }, { - "accuracy": 0.721593703885883, - "f1": 0.7173307078578347, - "f1_weighted": 0.7184545272456723 + "accuracy": 0.721594, + "f1": 0.717331, + "f1_weighted": 0.718455 }, { - "accuracy": 0.7545499262174127, - "f1": 0.7386459559639673, - "f1_weighted": 0.7526213558774643 + "accuracy": 0.75455, + "f1": 0.738646, + "f1_weighted": 0.752621 }, { - "accuracy": 0.7220855878012788, - "f1": 0.7137947636981896, - "f1_weighted": 0.7165408498996397 + "accuracy": 0.722086, + "f1": 0.713795, + "f1_weighted": 0.716541 }, { - "accuracy": 0.7437284800787014, - "f1": 0.7227735633917522, - "f1_weighted": 0.7362798488981783 + "accuracy": 0.743728, + "f1": 0.722774, + "f1_weighted": 0.73628 }, { - "accuracy": 0.6960157402852927, - "f1": 0.6873383424519843, - "f1_weighted": 0.6875594902710235 + "accuracy": 0.696016, + "f1": 0.687338, + "f1_weighted": 0.687559 }, { - "accuracy": 0.7122479094933596, - "f1": 0.6940667638934372, - "f1_weighted": 0.7083866754061244 + "accuracy": 0.712248, + "f1": 0.694067, + "f1_weighted": 0.708387 }, { - "accuracy": 0.7255287752090507, - "f1": 0.7176890888958867, - "f1_weighted": 0.7231797626602348 + "accuracy": 0.725529, + "f1": 0.717689, + "f1_weighted": 0.72318 }, { - "accuracy": 0.7594687653713723, - "f1": 0.7506790383048325, - "f1_weighted": 0.7563092326920408 + "accuracy": 0.759469, + "f1": 0.750679, + "f1_weighted": 0.756309 }, { - "accuracy": 0.7486473192326611, - "f1": 0.7354118119029295, - "f1_weighted": 0.7440834373103473 + "accuracy": 0.748647, + "f1": 0.735412, + "f1_weighted": 0.744083 } ] + }, + { + "accuracy": 0.728037, + "f1": 0.714535, + "f1_weighted": 0.722801, + "scores_per_experiment": [ + { + "accuracy": 0.740285, + "f1": 0.72853, + "f1_weighted": 0.736101 + }, + { + "accuracy": 0.739302, + "f1": 0.725693, + 
"f1_weighted": 0.737135 + }, + { + "accuracy": 0.727004, + "f1": 0.715785, + "f1_weighted": 0.72249 + }, + { + "accuracy": 0.722086, + "f1": 0.703254, + "f1_weighted": 0.718177 + }, + { + "accuracy": 0.737334, + "f1": 0.723067, + "f1_weighted": 0.73131 + }, + { + "accuracy": 0.714707, + "f1": 0.700795, + "f1_weighted": 0.701288 + }, + { + "accuracy": 0.725037, + "f1": 0.70656, + "f1_weighted": 0.720189 + }, + { + "accuracy": 0.697, + "f1": 0.696585, + "f1_weighted": 0.694791 + }, + { + "accuracy": 0.740777, + "f1": 0.726883, + "f1_weighted": 0.737555 + }, + { + "accuracy": 0.736842, + "f1": 0.718202, + "f1_weighted": 0.728979 + } + ], + "main_score": 0.728037, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] } ] }, - "task_name": "MassiveScenarioClassification" + "evaluation_time": 34.10709095001221, + "kg_co2_emissions": null } \ No newline at end of file diff --git a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/model_meta.json b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/model_meta.json index bb53dd37c1..7782416bb1 100644 --- a/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/model_meta.json +++ b/results/BAAI__bge-m3/5617a9f61b028005a4858fdac845db406aefb181/model_meta.json @@ -1 +1 @@ -{"name": "BAAI/bge-m3", "revision": "5617a9f61b028005a4858fdac845db406aefb181", "release_date": "2024-02-05", "languages": ["eng_Latn"], "n_parameters": null, "memory_usage": null, "max_tokens": null, "embed_dim": null, "license": null, "open_weights": true, "public_training_data": null, "public_training_code": null, "framework": [], "reference": null, "similarity_fn_name": null, "use_instructions": null, "training_datasets": null, "adapted_from": null, "superseded_by": null, "loader": "BGEM3Wrapper"} \ No newline at end of file +{"name": "BAAI/bge-m3", "revision": "5617a9f61b028005a4858fdac845db406aefb181", "release_date": "2024-06-28", "languages": ["afr_Latn", "amh_Ethi", "azj_Latn", "ast_Latn", "azj_Latn", "ben_Beng", 
"bul_Cyrl", "bel_Cyrl", "cat_Latn", "ceb_Latn", "ckb_Arab", "dan_Latn", "deu_Latn", "ell_Grek", "eng_Latn", "est_Latn", "fin_Latn", "fra_Latn", "glg_Latn", "guj_Gujr", "heb_Hebr", "hin_Deva", "ita_Latn", "jpn_Jpan", "kor_Hang", "rus_Cyrl", "tha_Thai", "ukr_Cyrl", "zho_Hans"], "n_parameters": 568000000, "max_tokens": 8194.0, "embed_dim": 4096, "license": "mit", "open_weights": true, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/BAAI/bge-m3", "similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": {"T2Retrieval": ["train"], "DuReader": ["train"], "MMarcoReranking": ["train"], "CMedQAv2-reranking": ["train"], "HotpotQA": ["train"], "NQ": ["train"], "MSMARCO": ["train"], "MrTidyRetrieval": ["train"], "MIRACLRetrieval": ["train"], "CodeSearchNet": ["train"]}, "adapted_from": null, "superseded_by": null, "loader": "sentence_transformers_loader"} \ No newline at end of file diff --git a/results/Cohere__Cohere-embed-multilingual-light-v3.0/1/model_meta.json b/results/Cohere__Cohere-embed-multilingual-light-v3.0/1/model_meta.json index 0ae9287533..c459029a4e 100644 --- a/results/Cohere__Cohere-embed-multilingual-light-v3.0/1/model_meta.json +++ b/results/Cohere__Cohere-embed-multilingual-light-v3.0/1/model_meta.json @@ -1 +1 @@ -{"name": "Cohere/Cohere-embed-multilingual-light-v3.0", "revision": "1", "release_date": "2023-11-02", "languages": ["afr-Latn", "amh-Ethi", "ara-Arab", "asm-Beng", "aze-Latn", "bel-Cyrl", "bul-Cyrl", "ben-Beng", "bod-Tibt", "bos-Latn", "cat-Latn", "ceb-Latn", "cos-Latn", "ces-Latn", "cym-Latn", "dan-Latn", "deu-Latn", "ell-Grek", "eng-Latn", "epo-Latn", "spa-Latn", "est-Latn", "eus-Latn", "fas-Arab", "fin-Latn", "fra-Latn", "fry-Latn", "gle-Latn", "gla-Latn", "glg-Latn", "guj-Gujr", "hau-Latn", "haw-Latn", "heb-Hebr", "hin-Deva", "hmn-Latn", "hrv-Latn", "hat-Latn", "hun-Latn", "hye-Armn", "ind-Latn", "ibo-Latn", "isl-Latn", "ita-Latn", "jpn-Jpan", 
"jav-Latn", "kat-Geor", "kaz-Cyrl", "khm-Khmr", "kan-Knda", "kor-Kore", "kur-Arab", "kir-Cyrl", "lat-Latn", "ltz-Latn", "lao-Laoo", "lit-Latn", "lav-Latn", "mlg-Latn", "mri-Latn", "mkd-Cyrl", "mal-Mlym", "mon-Cyrl", "mar-Deva", "msa-Latn", "mlt-Latn", "mya-Mymr", "nep-Deva", "nld-Latn", "nor-Latn", "nya-Latn", "ori-Orya", "pan-Guru", "pol-Latn", "por-Latn", "ron-Latn", "rus-Cyrl", "kin-Latn", "sin-Sinh", "slk-Latn", "slv-Latn", "smo-Latn", "sna-Latn", "som-Latn", "sqi-Latn", "srp-Cyrl", "sot-Latn", "sun-Latn", "swe-Latn", "swa-Latn", "tam-Taml", "tel-Telu", "tgk-Cyrl", "tha-Thai", "tuk-Latn", "tgl-Latn", "tur-Latn", "tat-Cyrl", "uig-Arab", "ukr-Cyrl", "urd-Arab", "uzb-Latn", "vie-Latn", "wol-Latn", "xho-Latn", "yid-Hebr", "yor-Latn", "zho-Hans", "zul-Latn"], "n_parameters": null, "memory_usage": null, "max_tokens": 512.0, "embed_dim": 384, "license": null, "open_weights": false, "public_training_data": null, "public_training_code": null, "framework": ["API"], "reference": "https://cohere.com/blog/introducing-embed-v3", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": null, "adapted_from": null, "superseded_by": null, "loader": "CohereTextEmbeddingModel"} \ No newline at end of file +{"name": "Cohere/Cohere-embed-multilingual-light-v3.0", "revision": "1", "release_date": "2023-11-02", "languages": ["afr-Latn", "amh-Ethi", "ara-Arab", "asm-Beng", "aze-Latn", "bel-Cyrl", "bul-Cyrl", "ben-Beng", "bod-Tibt", "bos-Latn", "cat-Latn", "ceb-Latn", "cos-Latn", "ces-Latn", "cym-Latn", "dan-Latn", "deu-Latn", "ell-Grek", "eng-Latn", "epo-Latn", "spa-Latn", "est-Latn", "eus-Latn", "fas-Arab", "fin-Latn", "fra-Latn", "fry-Latn", "gle-Latn", "gla-Latn", "glg-Latn", "guj-Gujr", "hau-Latn", "haw-Latn", "heb-Hebr", "hin-Deva", "hmn-Latn", "hrv-Latn", "hat-Latn", "hun-Latn", "hye-Armn", "ind-Latn", "ibo-Latn", "isl-Latn", "ita-Latn", "jpn-Jpan", "jav-Latn", "kat-Geor", "kaz-Cyrl", "khm-Khmr", "kan-Knda", "kor-Kore", "kur-Arab", "kir-Cyrl", "lat-Latn", 
"ltz-Latn", "lao-Laoo", "lit-Latn", "lav-Latn", "mlg-Latn", "mri-Latn", "mkd-Cyrl", "mal-Mlym", "mon-Cyrl", "mar-Deva", "msa-Latn", "mlt-Latn", "mya-Mymr", "nep-Deva", "nld-Latn", "nor-Latn", "nya-Latn", "ori-Orya", "pan-Guru", "pol-Latn", "por-Latn", "ron-Latn", "rus-Cyrl", "kin-Latn", "sin-Sinh", "slk-Latn", "slv-Latn", "smo-Latn", "sna-Latn", "som-Latn", "sqi-Latn", "srp-Cyrl", "sot-Latn", "sun-Latn", "swe-Latn", "swa-Latn", "tam-Taml", "tel-Telu", "tgk-Cyrl", "tha-Thai", "tuk-Latn", "tgl-Latn", "tur-Latn", "tat-Cyrl", "uig-Arab", "ukr-Cyrl", "urd-Arab", "uzb-Latn", "vie-Latn", "wol-Latn", "xho-Latn", "yid-Hebr", "yor-Latn", "zho-Hans", "zul-Latn"], "n_parameters": null, "max_tokens": 512.0, "embed_dim": 384, "license": null, "open_weights": false, "public_training_code": null, "framework": ["API"], "reference": "https://cohere.com/blog/introducing-embed-v3", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": null, "adapted_from": null, "superseded_by": null, "loader": "CohereTextEmbeddingModel"} \ No newline at end of file diff --git a/results/Cohere__Cohere-embed-multilingual-v3.0/1/model_meta.json b/results/Cohere__Cohere-embed-multilingual-v3.0/1/model_meta.json index 7be5639723..059c318cfd 100644 --- a/results/Cohere__Cohere-embed-multilingual-v3.0/1/model_meta.json +++ b/results/Cohere__Cohere-embed-multilingual-v3.0/1/model_meta.json @@ -1 +1 @@ -{"name": "Cohere/Cohere-embed-multilingual-v3.0", "revision": "1", "release_date": "2023-11-02", "languages": ["afr-Latn", "amh-Ethi", "ara-Arab", "asm-Beng", "aze-Latn", "bel-Cyrl", "bul-Cyrl", "ben-Beng", "bod-Tibt", "bos-Latn", "cat-Latn", "ceb-Latn", "cos-Latn", "ces-Latn", "cym-Latn", "dan-Latn", "deu-Latn", "ell-Grek", "eng-Latn", "epo-Latn", "spa-Latn", "est-Latn", "eus-Latn", "fas-Arab", "fin-Latn", "fra-Latn", "fry-Latn", "gle-Latn", "gla-Latn", "glg-Latn", "guj-Gujr", "hau-Latn", "haw-Latn", "heb-Hebr", "hin-Deva", "hmn-Latn", "hrv-Latn", "hat-Latn", "hun-Latn", "hye-Armn", 
"ind-Latn", "ibo-Latn", "isl-Latn", "ita-Latn", "jpn-Jpan", "jav-Latn", "kat-Geor", "kaz-Cyrl", "khm-Khmr", "kan-Knda", "kor-Kore", "kur-Arab", "kir-Cyrl", "lat-Latn", "ltz-Latn", "lao-Laoo", "lit-Latn", "lav-Latn", "mlg-Latn", "mri-Latn", "mkd-Cyrl", "mal-Mlym", "mon-Cyrl", "mar-Deva", "msa-Latn", "mlt-Latn", "mya-Mymr", "nep-Deva", "nld-Latn", "nor-Latn", "nya-Latn", "ori-Orya", "pan-Guru", "pol-Latn", "por-Latn", "ron-Latn", "rus-Cyrl", "kin-Latn", "sin-Sinh", "slk-Latn", "slv-Latn", "smo-Latn", "sna-Latn", "som-Latn", "sqi-Latn", "srp-Cyrl", "sot-Latn", "sun-Latn", "swe-Latn", "swa-Latn", "tam-Taml", "tel-Telu", "tgk-Cyrl", "tha-Thai", "tuk-Latn", "tgl-Latn", "tur-Latn", "tat-Cyrl", "uig-Arab", "ukr-Cyrl", "urd-Arab", "uzb-Latn", "vie-Latn", "wol-Latn", "xho-Latn", "yid-Hebr", "yor-Latn", "zho-Hans", "zul-Latn"], "n_parameters": null, "memory_usage": null, "max_tokens": null, "embed_dim": 512, "license": null, "open_weights": false, "public_training_data": null, "public_training_code": null, "framework": ["API"], "reference": "https://cohere.com/blog/introducing-embed-v3", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": null, "adapted_from": null, "superseded_by": null, "loader": "CohereTextEmbeddingModel"} \ No newline at end of file +{"name": "Cohere/Cohere-embed-multilingual-v3.0", "revision": "1", "release_date": "2023-11-02", "languages": ["afr-Latn", "amh-Ethi", "ara-Arab", "asm-Beng", "aze-Latn", "bel-Cyrl", "bul-Cyrl", "ben-Beng", "bod-Tibt", "bos-Latn", "cat-Latn", "ceb-Latn", "cos-Latn", "ces-Latn", "cym-Latn", "dan-Latn", "deu-Latn", "ell-Grek", "eng-Latn", "epo-Latn", "spa-Latn", "est-Latn", "eus-Latn", "fas-Arab", "fin-Latn", "fra-Latn", "fry-Latn", "gle-Latn", "gla-Latn", "glg-Latn", "guj-Gujr", "hau-Latn", "haw-Latn", "heb-Hebr", "hin-Deva", "hmn-Latn", "hrv-Latn", "hat-Latn", "hun-Latn", "hye-Armn", "ind-Latn", "ibo-Latn", "isl-Latn", "ita-Latn", "jpn-Jpan", "jav-Latn", "kat-Geor", "kaz-Cyrl", "khm-Khmr", 
"kan-Knda", "kor-Kore", "kur-Arab", "kir-Cyrl", "lat-Latn", "ltz-Latn", "lao-Laoo", "lit-Latn", "lav-Latn", "mlg-Latn", "mri-Latn", "mkd-Cyrl", "mal-Mlym", "mon-Cyrl", "mar-Deva", "msa-Latn", "mlt-Latn", "mya-Mymr", "nep-Deva", "nld-Latn", "nor-Latn", "nya-Latn", "ori-Orya", "pan-Guru", "pol-Latn", "por-Latn", "ron-Latn", "rus-Cyrl", "kin-Latn", "sin-Sinh", "slk-Latn", "slv-Latn", "smo-Latn", "sna-Latn", "som-Latn", "sqi-Latn", "srp-Cyrl", "sot-Latn", "sun-Latn", "swe-Latn", "swa-Latn", "tam-Taml", "tel-Telu", "tgk-Cyrl", "tha-Thai", "tuk-Latn", "tgl-Latn", "tur-Latn", "tat-Cyrl", "uig-Arab", "ukr-Cyrl", "urd-Arab", "uzb-Latn", "vie-Latn", "wol-Latn", "xho-Latn", "yid-Hebr", "yor-Latn", "zho-Hans", "zul-Latn"], "n_parameters": null, "max_tokens": null, "embed_dim": 512, "license": null, "open_weights": false, "public_training_code": null, "framework": ["API"], "reference": "https://cohere.com/blog/introducing-embed-v3", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": null, "adapted_from": null, "superseded_by": null, "loader": "CohereTextEmbeddingModel"} \ No newline at end of file diff --git a/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/GermanDPR.json b/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/GermanDPR.json new file mode 100644 index 0000000000..006be4541e --- /dev/null +++ b/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/GermanDPR.json @@ -0,0 +1,158 @@ +{ + "dataset_revision": "5129d02422a66be600ac89cd3e8531b4f97d347d", + "task_name": "GermanDPR", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "ndcg_at_1": 0.67512, + "ndcg_at_3": 0.80062, + "ndcg_at_5": 0.82704, + "ndcg_at_10": 0.83674, + "ndcg_at_20": 0.83976, + "ndcg_at_100": 0.84097, + "ndcg_at_1000": 0.84111, + "map_at_1": 0.67512, + "map_at_3": 0.77041, + "map_at_5": 0.78524, + "map_at_10": 0.78937, + "map_at_20": 0.79024, + 
"map_at_100": 0.79038, + "map_at_1000": 0.79039, + "recall_at_1": 0.67512, + "recall_at_3": 0.8878, + "recall_at_5": 0.95122, + "recall_at_10": 0.98049, + "recall_at_20": 0.9922, + "recall_at_100": 0.99902, + "recall_at_1000": 1.0, + "precision_at_1": 0.67512, + "precision_at_3": 0.29593, + "precision_at_5": 0.19024, + "precision_at_10": 0.09805, + "precision_at_20": 0.04961, + "precision_at_100": 0.00999, + "precision_at_1000": 0.001, + "mrr_at_1": 0.674146, + "mrr_at_3": 0.769919, + "mrr_at_5": 0.784748, + "mrr_at_10": 0.788886, + "mrr_at_20": 0.78975, + "mrr_at_100": 0.789896, + "mrr_at_1000": 0.789905, + "nauc_ndcg_at_1_max": 0.116589, + "nauc_ndcg_at_1_std": -0.132546, + "nauc_ndcg_at_1_diff1": 0.526583, + "nauc_ndcg_at_3_max": 0.175733, + "nauc_ndcg_at_3_std": -0.124489, + "nauc_ndcg_at_3_diff1": 0.485141, + "nauc_ndcg_at_5_max": 0.166985, + "nauc_ndcg_at_5_std": -0.126915, + "nauc_ndcg_at_5_diff1": 0.507238, + "nauc_ndcg_at_10_max": 0.174544, + "nauc_ndcg_at_10_std": -0.105128, + "nauc_ndcg_at_10_diff1": 0.50522, + "nauc_ndcg_at_20_max": 0.162368, + "nauc_ndcg_at_20_std": -0.114208, + "nauc_ndcg_at_20_diff1": 0.502748, + "nauc_ndcg_at_100_max": 0.157911, + "nauc_ndcg_at_100_std": -0.119919, + "nauc_ndcg_at_100_diff1": 0.504159, + "nauc_ndcg_at_1000_max": 0.157031, + "nauc_ndcg_at_1000_std": -0.121089, + "nauc_ndcg_at_1000_diff1": 0.505275, + "nauc_map_at_1_max": 0.116589, + "nauc_map_at_1_std": -0.132546, + "nauc_map_at_1_diff1": 0.526583, + "nauc_map_at_3_max": 0.157563, + "nauc_map_at_3_std": -0.127467, + "nauc_map_at_3_diff1": 0.496652, + "nauc_map_at_5_max": 0.151622, + "nauc_map_at_5_std": -0.128939, + "nauc_map_at_5_diff1": 0.507892, + "nauc_map_at_10_max": 0.154139, + "nauc_map_at_10_std": -0.121437, + "nauc_map_at_10_diff1": 0.507155, + "nauc_map_at_20_max": 0.151407, + "nauc_map_at_20_std": -0.123781, + "nauc_map_at_20_diff1": 0.506651, + "nauc_map_at_100_max": 0.151002, + "nauc_map_at_100_std": -0.124382, + "nauc_map_at_100_diff1": 0.506897, + 
"nauc_map_at_1000_max": 0.150959, + "nauc_map_at_1000_std": -0.124439, + "nauc_map_at_1000_diff1": 0.506951, + "nauc_recall_at_1_max": 0.116589, + "nauc_recall_at_1_std": -0.132546, + "nauc_recall_at_1_diff1": 0.526583, + "nauc_recall_at_3_max": 0.27038, + "nauc_recall_at_3_std": -0.108572, + "nauc_recall_at_3_diff1": 0.426163, + "nauc_recall_at_5_max": 0.333143, + "nauc_recall_at_5_std": -0.104512, + "nauc_recall_at_5_diff1": 0.513295, + "nauc_recall_at_10_max": 0.732287, + "nauc_recall_at_10_std": 0.395433, + "nauc_recall_at_10_diff1": 0.481916, + "nauc_recall_at_20_max": 0.718749, + "nauc_recall_at_20_std": 0.564973, + "nauc_recall_at_20_diff1": 0.270563, + "nauc_recall_at_100_max": 1.0, + "nauc_recall_at_100_std": 1.0, + "nauc_recall_at_100_diff1": -0.563948, + "nauc_recall_at_1000_max": NaN, + "nauc_recall_at_1000_std": NaN, + "nauc_recall_at_1000_diff1": NaN, + "nauc_precision_at_1_max": 0.116589, + "nauc_precision_at_1_std": -0.132546, + "nauc_precision_at_1_diff1": 0.526583, + "nauc_precision_at_3_max": 0.27038, + "nauc_precision_at_3_std": -0.108572, + "nauc_precision_at_3_diff1": 0.426163, + "nauc_precision_at_5_max": 0.333143, + "nauc_precision_at_5_std": -0.104512, + "nauc_precision_at_5_diff1": 0.513295, + "nauc_precision_at_10_max": 0.732287, + "nauc_precision_at_10_std": 0.395433, + "nauc_precision_at_10_diff1": 0.481916, + "nauc_precision_at_20_max": 0.718749, + "nauc_precision_at_20_std": 0.564973, + "nauc_precision_at_20_diff1": 0.270563, + "nauc_precision_at_100_max": 1.0, + "nauc_precision_at_100_std": 1.0, + "nauc_precision_at_100_diff1": -0.563948, + "nauc_precision_at_1000_max": NaN, + "nauc_precision_at_1000_std": NaN, + "nauc_precision_at_1000_diff1": NaN, + "nauc_mrr_at_1_max": 0.112249, + "nauc_mrr_at_1_std": -0.138482, + "nauc_mrr_at_1_diff1": 0.52911, + "nauc_mrr_at_3_max": 0.154564, + "nauc_mrr_at_3_std": -0.1315, + "nauc_mrr_at_3_diff1": 0.498405, + "nauc_mrr_at_5_max": 0.148401, + "nauc_mrr_at_5_std": -0.13328, + 
"nauc_mrr_at_5_diff1": 0.509765, + "nauc_mrr_at_10_max": 0.150842, + "nauc_mrr_at_10_std": -0.125888, + "nauc_mrr_at_10_diff1": 0.509071, + "nauc_mrr_at_20_max": 0.148098, + "nauc_mrr_at_20_std": -0.128249, + "nauc_mrr_at_20_diff1": 0.508576, + "nauc_mrr_at_100_max": 0.147691, + "nauc_mrr_at_100_std": -0.128853, + "nauc_mrr_at_100_diff1": 0.508823, + "nauc_mrr_at_1000_max": 0.147648, + "nauc_mrr_at_1000_std": -0.128909, + "nauc_mrr_at_1000_diff1": 0.508877, + "main_score": 0.83674, + "hf_subset": "default", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 203.90697646141052, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/model_meta.json b/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/model_meta.json index 7412bc141d..1e359355ca 100644 --- a/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/model_meta.json +++ b/results/Snowflake__snowflake-arctic-embed-l-v2.0/edc2df7b6c25794b340229ca082e7c78782e6374/model_meta.json @@ -1 +1 @@ -{"name": "Snowflake/snowflake-arctic-embed-l-v2.0", "revision": "edc2df7b6c25794b340229ca082e7c78782e6374", "release_date": "2024-12-04", "languages": ["afr_Latn", "ara_Arab", "aze_Latn", "bel_Cyrl", "bul_Cyrl", "ben_Beng", "cat_Latn", "ceb_Latn", "ces_Latn", "cym_Latn", "dan_Latn", "deu_Latn", "ell_Grek", "eng_Latn", "spa_Latn", "est_Latn", "eus_Latn", "fas_Arab", "fin_Latn", "fra_Latn", "glg_Latn", "guj_Gujr", "heb_Hebr", "hin_Deva", "hrv_Latn", "hat_Latn", "hun_Latn", "hye_Armn", "ind_Latn", "isl_Latn", "ita_Latn", "jpn_Jpan", "jav_Latn", "kat_Geor", "kaz_Cyrl", "khm_Khmr", "kan_Knda", "kor_Hang", "kir_Cyrl", "lao_Laoo", "lit_Latn", "lav_Latn", "mkd_Cyrl", "mal_Mlym", "mon_Cyrl", "mar_Deva", "msa_Latn", "mya_Mymr", "nep_Deva", "nld_Latn", "pan_Guru", "pol_Latn", "por_Latn", "que_Latn", "ron_Latn", "rus_Cyrl", "sin_Sinh", 
"slk_Latn", "slv_Latn", "som_Latn", "sqi_Latn", "srp_Cyrl", "swe_Latn", "swa_Latn", "tam_Taml", "tel_Telu", "tha_Thai", "tgl_Latn", "tur_Latn", "ukr_Cyrl", "urd_Arab", "vie_Latn", "yor_Latn", "zho_Hans"], "n_parameters": 568000000, "memory_usage": null, "max_tokens": 8192.0, "embed_dim": 1024, "license": "apache-2.0", "open_weights": true, "public_training_data": null, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": null, "adapted_from": "BAAI/bge-m3-retromae", "superseded_by": null, "loader": "sentence_transformers_loader"} \ No newline at end of file +{"name": "Snowflake/snowflake-arctic-embed-l-v2.0", "revision": "edc2df7b6c25794b340229ca082e7c78782e6374", "release_date": "2024-12-04", "languages": ["afr_Latn", "ara_Arab", "aze_Latn", "bel_Cyrl", "bul_Cyrl", "ben_Beng", "cat_Latn", "ceb_Latn", "ces_Latn", "cym_Latn", "dan_Latn", "deu_Latn", "ell_Grek", "eng_Latn", "spa_Latn", "est_Latn", "eus_Latn", "fas_Arab", "fin_Latn", "fra_Latn", "glg_Latn", "guj_Gujr", "heb_Hebr", "hin_Deva", "hrv_Latn", "hat_Latn", "hun_Latn", "hye_Armn", "ind_Latn", "isl_Latn", "ita_Latn", "jpn_Jpan", "jav_Latn", "kat_Geor", "kaz_Cyrl", "khm_Khmr", "kan_Knda", "kor_Hang", "kir_Cyrl", "lao_Laoo", "lit_Latn", "lav_Latn", "mkd_Cyrl", "mal_Mlym", "mon_Cyrl", "mar_Deva", "msa_Latn", "mya_Mymr", "nep_Deva", "nld_Latn", "pan_Guru", "pol_Latn", "por_Latn", "que_Latn", "ron_Latn", "rus_Cyrl", "sin_Sinh", "slk_Latn", "slv_Latn", "som_Latn", "sqi_Latn", "srp_Cyrl", "swe_Latn", "swa_Latn", "tam_Taml", "tel_Telu", "tha_Thai", "tgl_Latn", "tur_Latn", "ukr_Cyrl", "urd_Arab", "vie_Latn", "yor_Latn", "zho_Hans"], "n_parameters": 568000000, "max_tokens": 8192.0, "embed_dim": 1024, "license": "apache-2.0", "open_weights": true, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], 
"reference": "https://huggingface.co/Snowflake/snowflake-arctic-embed-l-v2.0", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {"NQ": ["test"], "NQHardNegatives": ["test"], "HotPotQA": ["test"], "HotPotQAHardNegatives": ["test"], "HotPotQA-PL": ["test"], "FEVER": ["test"], "FEVERHardNegatives": ["test"]}, "adapted_from": "BAAI/bge-m3-retromae", "superseded_by": null, "loader": "sentence_transformers_loader"} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonCounterfactualClassification.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonCounterfactualClassification.json new file mode 100644 index 0000000000..f96e16c3f1 --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonCounterfactualClassification.json @@ -0,0 +1,181 @@ +{ + "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205", + "task_name": "AmazonCounterfactualClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.705794, + "f1": 0.68743, + "f1_weighted": 0.716737, + "ap": 0.814942, + "ap_weighted": 0.814942, + "scores_per_experiment": [ + { + "accuracy": 0.738197, + "f1": 0.722147, + "f1_weighted": 0.748515, + "ap": 0.840371, + "ap_weighted": 0.840371 + }, + { + "accuracy": 0.736052, + "f1": 0.710274, + "f1_weighted": 0.744397, + "ap": 0.81851, + "ap_weighted": 0.81851 + }, + { + "accuracy": 0.759657, + "f1": 0.731876, + "f1_weighted": 0.765954, + "ap": 0.828119, + "ap_weighted": 0.828119 + }, + { + "accuracy": 0.654506, + "f1": 0.642616, + "f1_weighted": 0.668355, + "ap": 0.796669, + "ap_weighted": 0.796669 + }, + { + "accuracy": 0.718884, + "f1": 0.705028, + "f1_weighted": 0.730271, + "ap": 0.83386, + "ap_weighted": 0.83386 + }, + { + "accuracy": 0.688841, + "f1": 0.670125, + "f1_weighted": 0.70115, + "ap": 0.803576, + "ap_weighted": 0.803576 + }, + { + 
"accuracy": 0.708155, + "f1": 0.688159, + "f1_weighted": 0.719338, + "ap": 0.81283, + "ap_weighted": 0.81283 + }, + { + "accuracy": 0.639485, + "f1": 0.627965, + "f1_weighted": 0.653814, + "ap": 0.787802, + "ap_weighted": 0.787802 + }, + { + "accuracy": 0.751073, + "f1": 0.726376, + "f1_weighted": 0.758835, + "ap": 0.828942, + "ap_weighted": 0.828942 + }, + { + "accuracy": 0.66309, + "f1": 0.649733, + "f1_weighted": 0.676741, + "ap": 0.798744, + "ap_weighted": 0.798744 + } + ], + "main_score": 0.705794, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.698501, + "f1": 0.68184, + "f1_weighted": 0.709785, + "ap": 0.812608, + "ap_weighted": 0.812608, + "scores_per_experiment": [ + { + "accuracy": 0.702355, + "f1": 0.686188, + "f1_weighted": 0.7141, + "ap": 0.815494, + "ap_weighted": 0.815494 + }, + { + "accuracy": 0.711991, + "f1": 0.688399, + "f1_weighted": 0.721998, + "ap": 0.806709, + "ap_weighted": 0.806709 + }, + { + "accuracy": 0.760171, + "f1": 0.734972, + "f1_weighted": 0.766996, + "ap": 0.831388, + "ap_weighted": 0.831388 + }, + { + "accuracy": 0.692719, + "f1": 0.681273, + "f1_weighted": 0.704942, + "ap": 0.821292, + "ap_weighted": 0.821292 + }, + { + "accuracy": 0.705567, + "f1": 0.694079, + "f1_weighted": 0.71731, + "ap": 0.829876, + "ap_weighted": 0.829876 + }, + { + "accuracy": 0.718415, + "f1": 0.701393, + "f1_weighted": 0.729331, + "ap": 0.823653, + "ap_weighted": 0.823653 + }, + { + "accuracy": 0.665953, + "f1": 0.647078, + "f1_weighted": 0.679061, + "ap": 0.787511, + "ap_weighted": 0.787511 + }, + { + "accuracy": 0.642398, + "f1": 0.632931, + "f1_weighted": 0.656032, + "ap": 0.792926, + "ap_weighted": 0.792926 + }, + { + "accuracy": 0.731263, + "f1": 0.708501, + "f1_weighted": 0.740421, + "ap": 0.819467, + "ap_weighted": 0.819467 + }, + { + "accuracy": 0.654176, + "f1": 0.643585, + "f1_weighted": 0.66766, + "ap": 0.797767, + "ap_weighted": 0.797767 + } + ], + "main_score": 0.698501, + "hf_subset": "de", + 
"languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 85.73426079750061, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonReviewsClassification.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonReviewsClassification.json new file mode 100644 index 0000000000..9c52b3b479 --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/AmazonReviewsClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d", + "task_name": "AmazonReviewsClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.40684, + "f1": 0.385448, + "f1_weighted": 0.385448, + "scores_per_experiment": [ + { + "accuracy": 0.444, + "f1": 0.433799, + "f1_weighted": 0.433799 + }, + { + "accuracy": 0.3936, + "f1": 0.373416, + "f1_weighted": 0.373416 + }, + { + "accuracy": 0.3854, + "f1": 0.374231, + "f1_weighted": 0.374231 + }, + { + "accuracy": 0.4334, + "f1": 0.400241, + "f1_weighted": 0.400241 + }, + { + "accuracy": 0.4202, + "f1": 0.38621, + "f1_weighted": 0.38621 + }, + { + "accuracy": 0.4074, + "f1": 0.383979, + "f1_weighted": 0.383979 + }, + { + "accuracy": 0.3426, + "f1": 0.330814, + "f1_weighted": 0.330814 + }, + { + "accuracy": 0.4326, + "f1": 0.402513, + "f1_weighted": 0.402513 + }, + { + "accuracy": 0.3986, + "f1": 0.375458, + "f1_weighted": 0.375458 + }, + { + "accuracy": 0.4106, + "f1": 0.393819, + "f1_weighted": 0.393819 + } + ], + "main_score": 0.40684, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.40142, + "f1": 0.380438, + "f1_weighted": 0.380438, + "scores_per_experiment": [ + { + "accuracy": 0.4248, + "f1": 0.415297, + "f1_weighted": 0.415297 + }, + { + "accuracy": 0.411, + "f1": 0.392726, + "f1_weighted": 0.392726 + }, + { + "accuracy": 0.3828, + 
"f1": 0.371937, + "f1_weighted": 0.371937 + }, + { + "accuracy": 0.4214, + "f1": 0.38879, + "f1_weighted": 0.38879 + }, + { + "accuracy": 0.4134, + "f1": 0.377413, + "f1_weighted": 0.377413 + }, + { + "accuracy": 0.39, + "f1": 0.364907, + "f1_weighted": 0.364907 + }, + { + "accuracy": 0.3464, + "f1": 0.332527, + "f1_weighted": 0.332527 + }, + { + "accuracy": 0.4312, + "f1": 0.404496, + "f1_weighted": 0.404496 + }, + { + "accuracy": 0.3938, + "f1": 0.369978, + "f1_weighted": 0.369978 + }, + { + "accuracy": 0.3994, + "f1": 0.386307, + "f1_weighted": 0.386307 + } + ], + "main_score": 0.40142, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 39.509817361831665, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/FalseFriendsGermanEnglish.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/FalseFriendsGermanEnglish.json new file mode 100644 index 0000000000..44c5498e9d --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/FalseFriendsGermanEnglish.json @@ -0,0 +1,58 @@ +{ + "dataset_revision": "15d6c030d3336cbb09de97b2cefc46db93262d40", + "task_name": "FalseFriendsGermanEnglish", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "similarity_accuracy": 0.50853, + "similarity_accuracy_threshold": 0.961091, + "similarity_f1": 0.666958, + "similarity_f1_threshold": 0.737133, + "similarity_precision": 0.500328, + "similarity_recall": 1.0, + "similarity_ap": 0.496637, + "cosine_accuracy": 0.50853, + "cosine_accuracy_threshold": 0.96109, + "cosine_f1": 0.666958, + "cosine_f1_threshold": 0.737133, + "cosine_precision": 0.500328, + "cosine_recall": 1.0, + "cosine_ap": 0.496637, + "manhattan_accuracy": 0.50853, + "manhattan_accuracy_threshold": 3.618259, + "manhattan_f1": 0.666959, + "manhattan_f1_threshold": 11.207991, + "manhattan_precision": 
0.500658, + "manhattan_recall": 0.998688, + "manhattan_ap": 0.496999, + "euclidean_accuracy": 0.50853, + "euclidean_accuracy_threshold": 0.278961, + "euclidean_f1": 0.666958, + "euclidean_f1_threshold": 0.725073, + "euclidean_precision": 0.500328, + "euclidean_recall": 1.0, + "euclidean_ap": 0.496637, + "dot_accuracy": 0.50853, + "dot_accuracy_threshold": 0.96109, + "dot_f1": 0.666958, + "dot_f1_threshold": 0.737133, + "dot_precision": 0.500328, + "dot_recall": 1.0, + "dot_ap": 0.496637, + "max_accuracy": 0.50853, + "max_f1": 0.666959, + "max_precision": 0.500658, + "max_recall": 1.0, + "max_ap": 0.496999, + "main_score": 0.496999, + "hf_subset": "default", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 1.0718636512756348, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanDPR.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanDPR.json new file mode 100644 index 0000000000..6cd854ab57 --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanDPR.json @@ -0,0 +1,158 @@ +{ + "dataset_revision": "5129d02422a66be600ac89cd3e8531b4f97d347d", + "task_name": "GermanDPR", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "ndcg_at_1": 0.5922, + "ndcg_at_3": 0.74547, + "ndcg_at_5": 0.77519, + "ndcg_at_10": 0.78938, + "ndcg_at_20": 0.79401, + "ndcg_at_100": 0.79528, + "ndcg_at_1000": 0.79581, + "map_at_1": 0.5922, + "map_at_3": 0.70715, + "map_at_5": 0.72364, + "map_at_10": 0.72968, + "map_at_20": 0.73105, + "map_at_100": 0.73123, + "map_at_1000": 0.73125, + "recall_at_1": 0.5922, + "recall_at_3": 0.85659, + "recall_at_5": 0.92878, + "recall_at_10": 0.97171, + "recall_at_20": 0.98927, + "recall_at_100": 0.9961, + "recall_at_1000": 1.0, + "precision_at_1": 0.5922, + "precision_at_3": 0.28553, + "precision_at_5": 0.18576, + "precision_at_10": 0.09717, 
+ "precision_at_20": 0.04946, + "precision_at_100": 0.00996, + "precision_at_1000": 0.001, + "mrr_at_1": 0.592195, + "mrr_at_3": 0.707154, + "mrr_at_5": 0.723642, + "mrr_at_10": 0.729678, + "mrr_at_20": 0.731047, + "mrr_at_100": 0.731231, + "mrr_at_1000": 0.731255, + "nauc_ndcg_at_1_max": 0.203407, + "nauc_ndcg_at_1_std": -0.126544, + "nauc_ndcg_at_1_diff1": 0.521519, + "nauc_ndcg_at_3_max": 0.263472, + "nauc_ndcg_at_3_std": -0.129686, + "nauc_ndcg_at_3_diff1": 0.460559, + "nauc_ndcg_at_5_max": 0.266253, + "nauc_ndcg_at_5_std": -0.119333, + "nauc_ndcg_at_5_diff1": 0.461402, + "nauc_ndcg_at_10_max": 0.26549, + "nauc_ndcg_at_10_std": -0.098176, + "nauc_ndcg_at_10_diff1": 0.474746, + "nauc_ndcg_at_20_max": 0.25329, + "nauc_ndcg_at_20_std": -0.100426, + "nauc_ndcg_at_20_diff1": 0.475069, + "nauc_ndcg_at_100_max": 0.249986, + "nauc_ndcg_at_100_std": -0.104688, + "nauc_ndcg_at_100_diff1": 0.477726, + "nauc_ndcg_at_1000_max": 0.249041, + "nauc_ndcg_at_1000_std": -0.108078, + "nauc_ndcg_at_1000_diff1": 0.478287, + "nauc_map_at_1_max": 0.203407, + "nauc_map_at_1_std": -0.126544, + "nauc_map_at_1_diff1": 0.521519, + "nauc_map_at_3_max": 0.246318, + "nauc_map_at_3_std": -0.125108, + "nauc_map_at_3_diff1": 0.476715, + "nauc_map_at_5_max": 0.247093, + "nauc_map_at_5_std": -0.119102, + "nauc_map_at_5_diff1": 0.478153, + "nauc_map_at_10_max": 0.246361, + "nauc_map_at_10_std": -0.111469, + "nauc_map_at_10_diff1": 0.482702, + "nauc_map_at_20_max": 0.24351, + "nauc_map_at_20_std": -0.111945, + "nauc_map_at_20_diff1": 0.482763, + "nauc_map_at_100_max": 0.243109, + "nauc_map_at_100_std": -0.1125, + "nauc_map_at_100_diff1": 0.482991, + "nauc_map_at_1000_max": 0.243075, + "nauc_map_at_1000_std": -0.112617, + "nauc_map_at_1000_diff1": 0.48302, + "nauc_recall_at_1_max": 0.203407, + "nauc_recall_at_1_std": -0.126544, + "nauc_recall_at_1_diff1": 0.521519, + "nauc_recall_at_3_max": 0.348888, + "nauc_recall_at_3_std": -0.155436, + "nauc_recall_at_3_diff1": 0.381121, + "nauc_recall_at_5_max": 
0.437975, + "nauc_recall_at_5_std": -0.125614, + "nauc_recall_at_5_diff1": 0.312717, + "nauc_recall_at_10_max": 0.682578, + "nauc_recall_at_10_std": 0.234242, + "nauc_recall_at_10_diff1": 0.350383, + "nauc_recall_at_20_max": 0.628913, + "nauc_recall_at_20_std": 0.624248, + "nauc_recall_at_20_diff1": 0.175382, + "nauc_recall_at_100_max": 0.546719, + "nauc_recall_at_100_std": 0.967338, + "nauc_recall_at_100_diff1": 0.320328, + "nauc_recall_at_1000_max": NaN, + "nauc_recall_at_1000_std": NaN, + "nauc_recall_at_1000_diff1": NaN, + "nauc_precision_at_1_max": 0.203407, + "nauc_precision_at_1_std": -0.126544, + "nauc_precision_at_1_diff1": 0.521519, + "nauc_precision_at_3_max": 0.348888, + "nauc_precision_at_3_std": -0.155436, + "nauc_precision_at_3_diff1": 0.381121, + "nauc_precision_at_5_max": 0.437975, + "nauc_precision_at_5_std": -0.125614, + "nauc_precision_at_5_diff1": 0.312717, + "nauc_precision_at_10_max": 0.682578, + "nauc_precision_at_10_std": 0.234242, + "nauc_precision_at_10_diff1": 0.350383, + "nauc_precision_at_20_max": 0.628913, + "nauc_precision_at_20_std": 0.624248, + "nauc_precision_at_20_diff1": 0.175382, + "nauc_precision_at_100_max": 0.546719, + "nauc_precision_at_100_std": 0.967338, + "nauc_precision_at_100_diff1": 0.320328, + "nauc_precision_at_1000_max": NaN, + "nauc_precision_at_1000_std": NaN, + "nauc_precision_at_1000_diff1": NaN, + "nauc_mrr_at_1_max": 0.203407, + "nauc_mrr_at_1_std": -0.126544, + "nauc_mrr_at_1_diff1": 0.521519, + "nauc_mrr_at_3_max": 0.246318, + "nauc_mrr_at_3_std": -0.125108, + "nauc_mrr_at_3_diff1": 0.476715, + "nauc_mrr_at_5_max": 0.247093, + "nauc_mrr_at_5_std": -0.119102, + "nauc_mrr_at_5_diff1": 0.478153, + "nauc_mrr_at_10_max": 0.246361, + "nauc_mrr_at_10_std": -0.111469, + "nauc_mrr_at_10_diff1": 0.482702, + "nauc_mrr_at_20_max": 0.24351, + "nauc_mrr_at_20_std": -0.111945, + "nauc_mrr_at_20_diff1": 0.482763, + "nauc_mrr_at_100_max": 0.243109, + "nauc_mrr_at_100_std": -0.1125, + "nauc_mrr_at_100_diff1": 0.482991, + 
"nauc_mrr_at_1000_max": 0.243075, + "nauc_mrr_at_1000_std": -0.112617, + "nauc_mrr_at_1000_diff1": 0.48302, + "main_score": 0.78938, + "hf_subset": "default", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 13.16852593421936, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanQuAD-Retrieval.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanQuAD-Retrieval.json new file mode 100644 index 0000000000..0b50505d7c --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/GermanQuAD-Retrieval.json @@ -0,0 +1,158 @@ +{ + "dataset_revision": "f5c87ae5a2e7a5106606314eef45255f03151bb3", + "task_name": "GermanQuAD-Retrieval", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "ndcg_at_1": 0.87205, + "ndcg_at_3": 0.92325, + "ndcg_at_5": 0.92743, + "ndcg_at_10": 0.9314, + "ndcg_at_20": 0.93313, + "ndcg_at_100": 0.93528, + "ndcg_at_1000": 0.93559, + "map_at_1": 0.87205, + "map_at_3": 0.91137, + "map_at_5": 0.91373, + "map_at_10": 0.91537, + "map_at_20": 0.91585, + "map_at_100": 0.91617, + "map_at_1000": 0.91618, + "recall_at_1": 0.87205, + "recall_at_3": 0.95735, + "recall_at_5": 0.96733, + "recall_at_10": 0.97958, + "recall_at_20": 0.98639, + "recall_at_100": 0.99773, + "recall_at_1000": 1.0, + "precision_at_1": 0.87205, + "precision_at_3": 0.31912, + "precision_at_5": 0.19347, + "precision_at_10": 0.09796, + "precision_at_20": 0.04932, + "precision_at_100": 0.00998, + "precision_at_1000": 0.001, + "mrr_at_1": 0.872051, + "mrr_at_3": 0.911373, + "mrr_at_5": 0.913733, + "mrr_at_10": 0.915374, + "mrr_at_20": 0.915852, + "mrr_at_100": 0.916166, + "mrr_at_1000": 0.916182, + "nauc_ndcg_at_1_max": 0.715384, + "nauc_ndcg_at_1_std": -0.153748, + "nauc_ndcg_at_1_diff1": 0.899327, + "nauc_ndcg_at_3_max": 0.778567, + "nauc_ndcg_at_3_std": -0.136698, + 
"nauc_ndcg_at_3_diff1": 0.887663, + "nauc_ndcg_at_5_max": 0.787372, + "nauc_ndcg_at_5_std": -0.130923, + "nauc_ndcg_at_5_diff1": 0.891482, + "nauc_ndcg_at_10_max": 0.778647, + "nauc_ndcg_at_10_std": -0.131639, + "nauc_ndcg_at_10_diff1": 0.893914, + "nauc_ndcg_at_20_max": 0.773984, + "nauc_ndcg_at_20_std": -0.134365, + "nauc_ndcg_at_20_diff1": 0.893319, + "nauc_ndcg_at_100_max": 0.766479, + "nauc_ndcg_at_100_std": -0.141911, + "nauc_ndcg_at_100_diff1": 0.893434, + "nauc_ndcg_at_1000_max": 0.765444, + "nauc_ndcg_at_1000_std": -0.140652, + "nauc_ndcg_at_1000_diff1": 0.89351, + "nauc_map_at_1_max": 0.715384, + "nauc_map_at_1_std": -0.153748, + "nauc_map_at_1_diff1": 0.899327, + "nauc_map_at_3_max": 0.759775, + "nauc_map_at_3_std": -0.145076, + "nauc_map_at_3_diff1": 0.891059, + "nauc_map_at_5_max": 0.763663, + "nauc_map_at_5_std": -0.142461, + "nauc_map_at_5_diff1": 0.893078, + "nauc_map_at_10_max": 0.760277, + "nauc_map_at_10_std": -0.14281, + "nauc_map_at_10_diff1": 0.893869, + "nauc_map_at_20_max": 0.759133, + "nauc_map_at_20_std": -0.143398, + "nauc_map_at_20_diff1": 0.893755, + "nauc_map_at_100_max": 0.758219, + "nauc_map_at_100_std": -0.144197, + "nauc_map_at_100_diff1": 0.893814, + "nauc_map_at_1000_max": 0.758178, + "nauc_map_at_1000_std": -0.144159, + "nauc_map_at_1000_diff1": 0.893822, + "nauc_recall_at_1_max": 0.715384, + "nauc_recall_at_1_std": -0.153748, + "nauc_recall_at_1_diff1": 0.899327, + "nauc_recall_at_3_max": 0.886372, + "nauc_recall_at_3_std": -0.085411, + "nauc_recall_at_3_diff1": 0.868253, + "nauc_recall_at_5_max": 0.962786, + "nauc_recall_at_5_std": -0.040735, + "nauc_recall_at_5_diff1": 0.881065, + "nauc_recall_at_10_max": 0.978942, + "nauc_recall_at_10_std": 0.003005, + "nauc_recall_at_10_diff1": 0.90083, + "nauc_recall_at_20_max": 0.991287, + "nauc_recall_at_20_std": 0.015147, + "nauc_recall_at_20_diff1": 0.892244, + "nauc_recall_at_100_max": 0.973861, + "nauc_recall_at_100_std": -0.408745, + "nauc_recall_at_100_diff1": 0.884699, + 
"nauc_recall_at_1000_max": NaN, + "nauc_recall_at_1000_std": NaN, + "nauc_recall_at_1000_diff1": NaN, + "nauc_precision_at_1_max": 0.715384, + "nauc_precision_at_1_std": -0.153748, + "nauc_precision_at_1_diff1": 0.899327, + "nauc_precision_at_3_max": 0.886372, + "nauc_precision_at_3_std": -0.085411, + "nauc_precision_at_3_diff1": 0.868253, + "nauc_precision_at_5_max": 0.962786, + "nauc_precision_at_5_std": -0.040735, + "nauc_precision_at_5_diff1": 0.881065, + "nauc_precision_at_10_max": 0.978942, + "nauc_precision_at_10_std": 0.003005, + "nauc_precision_at_10_diff1": 0.90083, + "nauc_precision_at_20_max": 0.991287, + "nauc_precision_at_20_std": 0.015147, + "nauc_precision_at_20_diff1": 0.892244, + "nauc_precision_at_100_max": 0.973861, + "nauc_precision_at_100_std": -0.408745, + "nauc_precision_at_100_diff1": 0.884699, + "nauc_precision_at_1000_max": 1.0, + "nauc_precision_at_1000_std": 1.0, + "nauc_precision_at_1000_diff1": 1.0, + "nauc_mrr_at_1_max": 0.715384, + "nauc_mrr_at_1_std": -0.153748, + "nauc_mrr_at_1_diff1": 0.899327, + "nauc_mrr_at_3_max": 0.759775, + "nauc_mrr_at_3_std": -0.145076, + "nauc_mrr_at_3_diff1": 0.891059, + "nauc_mrr_at_5_max": 0.763663, + "nauc_mrr_at_5_std": -0.142461, + "nauc_mrr_at_5_diff1": 0.893078, + "nauc_mrr_at_10_max": 0.760277, + "nauc_mrr_at_10_std": -0.14281, + "nauc_mrr_at_10_diff1": 0.893869, + "nauc_mrr_at_20_max": 0.759133, + "nauc_mrr_at_20_std": -0.143398, + "nauc_mrr_at_20_diff1": 0.893755, + "nauc_mrr_at_100_max": 0.758219, + "nauc_mrr_at_100_std": -0.144197, + "nauc_mrr_at_100_diff1": 0.893814, + "nauc_mrr_at_1000_max": 0.758178, + "nauc_mrr_at_1000_std": -0.144159, + "nauc_mrr_at_1000_diff1": 0.893822, + "main_score": 0.913733, + "hf_subset": "default", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 5.1020729541778564, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPDomainClassification.json 
b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPDomainClassification.json new file mode 100644 index 0000000000..f7c1863dca --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPDomainClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf", + "task_name": "MTOPDomainClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.860661, + "f1": 0.84838, + "f1_weighted": 0.860317, + "scores_per_experiment": [ + { + "accuracy": 0.873829, + "f1": 0.862083, + "f1_weighted": 0.873636 + }, + { + "accuracy": 0.839669, + "f1": 0.829691, + "f1_weighted": 0.839844 + }, + { + "accuracy": 0.854545, + "f1": 0.837422, + "f1_weighted": 0.854302 + }, + { + "accuracy": 0.869972, + "f1": 0.857268, + "f1_weighted": 0.868732 + }, + { + "accuracy": 0.872176, + "f1": 0.85866, + "f1_weighted": 0.871064 + }, + { + "accuracy": 0.857851, + "f1": 0.838354, + "f1_weighted": 0.855244 + }, + { + "accuracy": 0.863912, + "f1": 0.854162, + "f1_weighted": 0.863789 + }, + { + "accuracy": 0.842975, + "f1": 0.835944, + "f1_weighted": 0.844849 + }, + { + "accuracy": 0.860055, + "f1": 0.851354, + "f1_weighted": 0.860699 + }, + { + "accuracy": 0.871625, + "f1": 0.858866, + "f1_weighted": 0.871011 + } + ], + "main_score": 0.860661, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.862468, + "f1": 0.847967, + "f1_weighted": 0.861675, + "scores_per_experiment": [ + { + "accuracy": 0.873204, + "f1": 0.8603, + "f1_weighted": 0.872062 + }, + { + "accuracy": 0.840518, + "f1": 0.826207, + "f1_weighted": 0.840088 + }, + { + "accuracy": 0.861651, + "f1": 0.839463, + "f1_weighted": 0.860128 + }, + { + "accuracy": 0.872077, + "f1": 0.859522, + "f1_weighted": 0.871429 + }, + { + "accuracy": 0.871231, + "f1": 0.856149, + "f1_weighted": 0.869944 + }, + { + "accuracy": 0.857706, + "f1": 0.835122, + 
"f1_weighted": 0.854029 + }, + { + "accuracy": 0.86785, + "f1": 0.8532, + "f1_weighted": 0.867412 + }, + { + "accuracy": 0.84869, + "f1": 0.840454, + "f1_weighted": 0.850568 + }, + { + "accuracy": 0.857988, + "f1": 0.848332, + "f1_weighted": 0.85862 + }, + { + "accuracy": 0.873767, + "f1": 0.860924, + "f1_weighted": 0.872476 + } + ], + "main_score": 0.862468, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 21.148167371749878, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPIntentClassification.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPIntentClassification.json new file mode 100644 index 0000000000..a37882df32 --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/MTOPIntentClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba", + "task_name": "MTOPIntentClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.557906, + "f1": 0.350758, + "f1_weighted": 0.601537, + "scores_per_experiment": [ + { + "accuracy": 0.548209, + "f1": 0.348813, + "f1_weighted": 0.596274 + }, + { + "accuracy": 0.560882, + "f1": 0.369328, + "f1_weighted": 0.608902 + }, + { + "accuracy": 0.560331, + "f1": 0.361627, + "f1_weighted": 0.610043 + }, + { + "accuracy": 0.550964, + "f1": 0.334527, + "f1_weighted": 0.585601 + }, + { + "accuracy": 0.577961, + "f1": 0.36162, + "f1_weighted": 0.620009 + }, + { + "accuracy": 0.53719, + "f1": 0.348759, + "f1_weighted": 0.567439 + }, + { + "accuracy": 0.577961, + "f1": 0.334588, + "f1_weighted": 0.618905 + }, + { + "accuracy": 0.572452, + "f1": 0.351694, + "f1_weighted": 0.619202 + }, + { + "accuracy": 0.521212, + "f1": 0.343009, + "f1_weighted": 0.569724 + }, + { + "accuracy": 0.571901, + "f1": 0.35361, + "f1_weighted": 
0.619271 + } + ], + "main_score": 0.557906, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.570668, + "f1": 0.358673, + "f1_weighted": 0.61582, + "scores_per_experiment": [ + { + "accuracy": 0.572837, + "f1": 0.371875, + "f1_weighted": 0.619742 + }, + { + "accuracy": 0.559594, + "f1": 0.363088, + "f1_weighted": 0.611641 + }, + { + "accuracy": 0.57481, + "f1": 0.352673, + "f1_weighted": 0.624732 + }, + { + "accuracy": 0.551423, + "f1": 0.345272, + "f1_weighted": 0.591735 + }, + { + "accuracy": 0.582981, + "f1": 0.365224, + "f1_weighted": 0.628691 + }, + { + "accuracy": 0.538743, + "f1": 0.350986, + "f1_weighted": 0.570269 + }, + { + "accuracy": 0.595097, + "f1": 0.35422, + "f1_weighted": 0.638484 + }, + { + "accuracy": 0.600451, + "f1": 0.365976, + "f1_weighted": 0.648315 + }, + { + "accuracy": 0.53339, + "f1": 0.349534, + "f1_weighted": 0.58278 + }, + { + "accuracy": 0.597351, + "f1": 0.367881, + "f1_weighted": 0.641814 + } + ], + "main_score": 0.570668, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 28.479371786117554, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/PawsXPairClassification.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/PawsXPairClassification.json new file mode 100644 index 0000000000..af6b803059 --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/PawsXPairClassification.json @@ -0,0 +1,107 @@ +{ + "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646", + "task_name": "PawsXPairClassification", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "similarity_accuracy": 0.5845, + "similarity_accuracy_threshold": 0.992572, + "similarity_f1": 0.624245, + "similarity_f1_threshold": 0.929772, + "similarity_precision": 0.457769, + "similarity_recall": 
0.981006, + "similarity_ap": 0.541329, + "cosine_accuracy": 0.5845, + "cosine_accuracy_threshold": 0.992572, + "cosine_f1": 0.624245, + "cosine_f1_threshold": 0.929772, + "cosine_precision": 0.457769, + "cosine_recall": 0.981006, + "cosine_ap": 0.538849, + "manhattan_accuracy": 0.5855, + "manhattan_accuracy_threshold": 1.898788, + "manhattan_f1": 0.624108, + "manhattan_f1_threshold": 5.749293, + "manhattan_precision": 0.458355, + "manhattan_recall": 0.977654, + "manhattan_ap": 0.539746, + "euclidean_accuracy": 0.5845, + "euclidean_accuracy_threshold": 0.121885, + "euclidean_f1": 0.624245, + "euclidean_f1_threshold": 0.374775, + "euclidean_precision": 0.457769, + "euclidean_recall": 0.981006, + "euclidean_ap": 0.538848, + "dot_accuracy": 0.5845, + "dot_accuracy_threshold": 0.992572, + "dot_f1": 0.624245, + "dot_f1_threshold": 0.929772, + "dot_precision": 0.457769, + "dot_recall": 0.981006, + "dot_ap": 0.53894, + "max_accuracy": 0.5855, + "max_f1": 0.624245, + "max_precision": 0.458355, + "max_recall": 0.981006, + "max_ap": 0.541329, + "main_score": 0.541329, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "validation": [ + { + "similarity_accuracy": 0.5965, + "similarity_accuracy_threshold": 0.999701, + "similarity_f1": 0.599567, + "similarity_f1_threshold": 0.875372, + "similarity_precision": 0.42813, + "similarity_recall": 1.0, + "similarity_ap": 0.487436, + "cosine_accuracy": 0.5965, + "cosine_accuracy_threshold": 0.999701, + "cosine_f1": 0.599567, + "cosine_f1_threshold": 0.875372, + "cosine_precision": 0.42813, + "cosine_recall": 1.0, + "cosine_ap": 0.482428, + "manhattan_accuracy": 0.5965, + "manhattan_accuracy_threshold": 0.386895, + "manhattan_f1": 0.599783, + "manhattan_f1_threshold": 7.41747, + "manhattan_precision": 0.428351, + "manhattan_recall": 1.0, + "manhattan_ap": 0.483387, + "euclidean_accuracy": 0.5965, + "euclidean_accuracy_threshold": 0.024388, + "euclidean_f1": 0.599567, + "euclidean_f1_threshold": 0.498898, + 
"euclidean_precision": 0.42813, + "euclidean_recall": 1.0, + "euclidean_ap": 0.482428, + "dot_accuracy": 0.5985, + "dot_accuracy_threshold": 1.0, + "dot_f1": 0.599567, + "dot_f1_threshold": 0.875372, + "dot_precision": 0.42813, + "dot_recall": 1.0, + "dot_ap": 0.491956, + "max_accuracy": 0.5985, + "max_f1": 0.599783, + "max_precision": 0.428351, + "max_recall": 1.0, + "max_ap": 0.491956, + "main_score": 0.491956, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 5.304517507553101, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/XMarket.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/XMarket.json new file mode 100644 index 0000000000..5c27ce421b --- /dev/null +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/XMarket.json @@ -0,0 +1,158 @@ +{ + "dataset_revision": "dfe57acff5b62c23732a7b7d3e3fb84ff501708b", + "task_name": "XMarket", + "mteb_version": "1.29.10", + "scores": { + "test": [ + { + "ndcg_at_1": 0.1516, + "ndcg_at_3": 0.14916, + "ndcg_at_5": 0.15081, + "ndcg_at_10": 0.15546, + "ndcg_at_20": 0.16259, + "ndcg_at_100": 0.18567, + "ndcg_at_1000": 0.22237, + "map_at_1": 0.04396, + "map_at_3": 0.06778, + "map_at_5": 0.07767, + "map_at_10": 0.08817, + "map_at_20": 0.09588, + "map_at_100": 0.10507, + "map_at_1000": 0.10942, + "recall_at_1": 0.04396, + "recall_at_3": 0.08557, + "recall_at_5": 0.11032, + "recall_at_10": 0.14697, + "recall_at_20": 0.18567, + "recall_at_100": 0.28615, + "recall_at_1000": 0.458, + "precision_at_1": 0.1516, + "precision_at_3": 0.12336, + "precision_at_5": 0.10711, + "precision_at_10": 0.08449, + "precision_at_20": 0.06329, + "precision_at_100": 0.02857, + "precision_at_1000": 0.00812, + "mrr_at_1": 0.151598, + "mrr_at_3": 0.196598, + "mrr_at_5": 0.208104, + "mrr_at_10": 0.216678, + "mrr_at_20": 0.220609, + "mrr_at_100": 
0.223962, + "mrr_at_1000": 0.224626, + "nauc_ndcg_at_1_max": 0.203044, + "nauc_ndcg_at_1_std": 0.240929, + "nauc_ndcg_at_1_diff1": 0.167017, + "nauc_ndcg_at_3_max": 0.192645, + "nauc_ndcg_at_3_std": 0.256287, + "nauc_ndcg_at_3_diff1": 0.13872, + "nauc_ndcg_at_5_max": 0.187711, + "nauc_ndcg_at_5_std": 0.260568, + "nauc_ndcg_at_5_diff1": 0.136172, + "nauc_ndcg_at_10_max": 0.177508, + "nauc_ndcg_at_10_std": 0.259459, + "nauc_ndcg_at_10_diff1": 0.133588, + "nauc_ndcg_at_20_max": 0.169441, + "nauc_ndcg_at_20_std": 0.257989, + "nauc_ndcg_at_20_diff1": 0.13327, + "nauc_ndcg_at_100_max": 0.160949, + "nauc_ndcg_at_100_std": 0.266419, + "nauc_ndcg_at_100_diff1": 0.128889, + "nauc_ndcg_at_1000_max": 0.166864, + "nauc_ndcg_at_1000_std": 0.296552, + "nauc_ndcg_at_1000_diff1": 0.121614, + "nauc_map_at_1_max": 0.089683, + "nauc_map_at_1_std": 0.041819, + "nauc_map_at_1_diff1": 0.302405, + "nauc_map_at_3_max": 0.09996, + "nauc_map_at_3_std": 0.07114, + "nauc_map_at_3_diff1": 0.231627, + "nauc_map_at_5_max": 0.110681, + "nauc_map_at_5_std": 0.09655, + "nauc_map_at_5_diff1": 0.208783, + "nauc_map_at_10_max": 0.125103, + "nauc_map_at_10_std": 0.132127, + "nauc_map_at_10_diff1": 0.189581, + "nauc_map_at_20_max": 0.139756, + "nauc_map_at_20_std": 0.163407, + "nauc_map_at_20_diff1": 0.176591, + "nauc_map_at_100_max": 0.156901, + "nauc_map_at_100_std": 0.201285, + "nauc_map_at_100_diff1": 0.163176, + "nauc_map_at_1000_max": 0.165915, + "nauc_map_at_1000_std": 0.220585, + "nauc_map_at_1000_diff1": 0.158211, + "nauc_recall_at_1_max": 0.089683, + "nauc_recall_at_1_std": 0.041819, + "nauc_recall_at_1_diff1": 0.302405, + "nauc_recall_at_3_max": 0.074723, + "nauc_recall_at_3_std": 0.064755, + "nauc_recall_at_3_diff1": 0.19245, + "nauc_recall_at_5_max": 0.076601, + "nauc_recall_at_5_std": 0.091138, + "nauc_recall_at_5_diff1": 0.160936, + "nauc_recall_at_10_max": 0.076469, + "nauc_recall_at_10_std": 0.126776, + "nauc_recall_at_10_diff1": 0.126922, + "nauc_recall_at_20_max": 0.082321, + 
"nauc_recall_at_20_std": 0.161643, + "nauc_recall_at_20_diff1": 0.113748, + "nauc_recall_at_100_max": 0.079585, + "nauc_recall_at_100_std": 0.207257, + "nauc_recall_at_100_diff1": 0.095673, + "nauc_recall_at_1000_max": 0.066772, + "nauc_recall_at_1000_std": 0.249238, + "nauc_recall_at_1000_diff1": 0.084073, + "nauc_precision_at_1_max": 0.203044, + "nauc_precision_at_1_std": 0.240929, + "nauc_precision_at_1_diff1": 0.167017, + "nauc_precision_at_3_max": 0.222879, + "nauc_precision_at_3_std": 0.317938, + "nauc_precision_at_3_diff1": 0.085462, + "nauc_precision_at_5_max": 0.236988, + "nauc_precision_at_5_std": 0.366147, + "nauc_precision_at_5_diff1": 0.054348, + "nauc_precision_at_10_max": 0.253212, + "nauc_precision_at_10_std": 0.418504, + "nauc_precision_at_10_diff1": 0.023799, + "nauc_precision_at_20_max": 0.266308, + "nauc_precision_at_20_std": 0.456984, + "nauc_precision_at_20_diff1": 0.000257, + "nauc_precision_at_100_max": 0.264595, + "nauc_precision_at_100_std": 0.489513, + "nauc_precision_at_100_diff1": -0.022013, + "nauc_precision_at_1000_max": 0.219897, + "nauc_precision_at_1000_std": 0.412952, + "nauc_precision_at_1000_diff1": -0.056858, + "nauc_mrr_at_1_max": 0.203044, + "nauc_mrr_at_1_std": 0.240929, + "nauc_mrr_at_1_diff1": 0.167017, + "nauc_mrr_at_3_max": 0.190864, + "nauc_mrr_at_3_std": 0.258584, + "nauc_mrr_at_3_diff1": 0.145435, + "nauc_mrr_at_5_max": 0.184383, + "nauc_mrr_at_5_std": 0.260719, + "nauc_mrr_at_5_diff1": 0.139316, + "nauc_mrr_at_10_max": 0.182958, + "nauc_mrr_at_10_std": 0.264431, + "nauc_mrr_at_10_diff1": 0.137192, + "nauc_mrr_at_20_max": 0.182317, + "nauc_mrr_at_20_std": 0.264007, + "nauc_mrr_at_20_diff1": 0.13717, + "nauc_mrr_at_100_max": 0.181592, + "nauc_mrr_at_100_std": 0.264616, + "nauc_mrr_at_100_diff1": 0.13694, + "nauc_mrr_at_1000_max": 0.181664, + "nauc_mrr_at_1000_std": 0.264636, + "nauc_mrr_at_1000_diff1": 0.136895, + "main_score": 0.15546, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + 
"evaluation_time": 48.49527955055237, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/model_meta.json b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/model_meta.json index 8e89e36fef..4a651b25ce 100644 --- a/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/model_meta.json +++ b/results/intfloat__multilingual-e5-small/fd1525a9fd15316a2d503bf26ab031a61d056e98/model_meta.json @@ -1 +1 @@ -{"name": "intfloat/multilingual-e5-small", "revision": "fd1525a9fd15316a2d503bf26ab031a61d056e98", "release_date": "2024-02-08", "languages": ["afr_Latn", "amh_Latn", "ara_Latn", "asm_Latn", "aze_Latn", "bel_Latn", "bul_Latn", "ben_Latn", "ben_Beng", "bre_Latn", "bos_Latn", "cat_Latn", "ces_Latn", "cym_Latn", "dan_Latn", "deu_Latn", "ell_Latn", "eng_Latn", "epo_Latn", "spa_Latn", "est_Latn", "eus_Latn", "fas_Latn", "fin_Latn", "fra_Latn", "fry_Latn", "gle_Latn", "gla_Latn", "glg_Latn", "guj_Latn", "hau_Latn", "heb_Latn", "hin_Latn", "hin_Deva", "hrv_Latn", "hun_Latn", "hye_Latn", "ind_Latn", "isl_Latn", "ita_Latn", "jpn_Latn", "jav_Latn", "kat_Latn", "kaz_Latn", "khm_Latn", "kan_Latn", "kor_Latn", "kur_Latn", "kir_Latn", "lat_Latn", "lao_Latn", "lit_Latn", "lav_Latn", "mlg_Latn", "mkd_Latn", "mal_Latn", "mon_Latn", "mar_Latn", "msa_Latn", "mya_Latn", "nep_Latn", "nld_Latn", "nob_Latn", "orm_Latn", "ori_Latn", "pan_Latn", "pol_Latn", "pus_Latn", "por_Latn", "ron_Latn", "rus_Latn", "san_Latn", "snd_Latn", "sin_Latn", "slk_Latn", "slv_Latn", "som_Latn", "sqi_Latn", "srp_Latn", "sun_Latn", "swe_Latn", "swa_Latn", "tam_Latn", "tam_Taml", "tel_Latn", "tel_Telu", "tha_Latn", "tgl_Latn", "tur_Latn", "uig_Latn", "ukr_Latn", "urd_Latn", "urd_Arab", "uzb_Latn", "vie_Latn", "xho_Latn", "yid_Latn", "zho_Hant", "zho_Hans"], "n_parameters": 118000000, "memory_usage": null, "max_tokens": 512.0, "embed_dim": 384, "license": "mit", 
"open_weights": true, "public_training_data": null, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/intfloat/multilingual-e5-small", "similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": null, "adapted_from": null, "superseded_by": null, "loader": "sentence_transformers_loader"} \ No newline at end of file +{"name": "intfloat/multilingual-e5-small", "revision": "fd1525a9fd15316a2d503bf26ab031a61d056e98", "release_date": "2024-02-08", "languages": ["afr_Latn", "amh_Latn", "ara_Latn", "asm_Latn", "aze_Latn", "bel_Latn", "bul_Latn", "ben_Latn", "ben_Beng", "bre_Latn", "bos_Latn", "cat_Latn", "ces_Latn", "cym_Latn", "dan_Latn", "deu_Latn", "ell_Latn", "eng_Latn", "epo_Latn", "spa_Latn", "est_Latn", "eus_Latn", "fas_Latn", "fin_Latn", "fra_Latn", "fry_Latn", "gle_Latn", "gla_Latn", "glg_Latn", "guj_Latn", "hau_Latn", "heb_Latn", "hin_Latn", "hin_Deva", "hrv_Latn", "hun_Latn", "hye_Latn", "ind_Latn", "isl_Latn", "ita_Latn", "jpn_Latn", "jav_Latn", "kat_Latn", "kaz_Latn", "khm_Latn", "kan_Latn", "kor_Latn", "kur_Latn", "kir_Latn", "lat_Latn", "lao_Latn", "lit_Latn", "lav_Latn", "mlg_Latn", "mkd_Latn", "mal_Latn", "mon_Latn", "mar_Latn", "msa_Latn", "mya_Latn", "nep_Latn", "nld_Latn", "nob_Latn", "orm_Latn", "ori_Latn", "pan_Latn", "pol_Latn", "pus_Latn", "por_Latn", "ron_Latn", "rus_Latn", "san_Latn", "snd_Latn", "sin_Latn", "slk_Latn", "slv_Latn", "som_Latn", "sqi_Latn", "srp_Latn", "sun_Latn", "swe_Latn", "swa_Latn", "tam_Latn", "tam_Taml", "tel_Latn", "tel_Telu", "tha_Latn", "tgl_Latn", "tur_Latn", "uig_Latn", "ukr_Latn", "urd_Latn", "urd_Arab", "uzb_Latn", "vie_Latn", "xho_Latn", "yid_Latn", "zho_Hant", "zho_Hans"], "n_parameters": 118000000, "max_tokens": 512.0, "embed_dim": 384, "license": "mit", "open_weights": true, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/intfloat/multilingual-e5-small", 
"similarity_fn_name": "cosine", "use_instructions": true, "training_datasets": {"MSMARCO": ["train"], "MSMARCOHardNegatives": ["train"], "NanoMSMARCORetrieval": ["train"], "MSMARCO-PL": ["train"], "NQ": ["train"], "NQHardNegatives": ["train"], "NanoNQRetrieval": ["train"], "NQ-PL": ["train"]}, "adapted_from": null, "superseded_by": null, "loader": "sentence_transformers_loader"} \ No newline at end of file diff --git a/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonCounterfactualClassification.json b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonCounterfactualClassification.json new file mode 100644 index 0000000000..a3e5960e38 --- /dev/null +++ b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonCounterfactualClassification.json @@ -0,0 +1,181 @@ +{ + "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205", + "task_name": "AmazonCounterfactualClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.695279, + "f1": 0.673793, + "f1_weighted": 0.706517, + "ap": 0.803739, + "ap_weighted": 0.803739, + "scores_per_experiment": [ + { + "accuracy": 0.699571, + "f1": 0.681843, + "f1_weighted": 0.711497, + "ap": 0.812194, + "ap_weighted": 0.812194 + }, + { + "accuracy": 0.714592, + "f1": 0.678048, + "f1_weighted": 0.720877, + "ap": 0.791447, + "ap_weighted": 0.791447 + }, + { + "accuracy": 0.714592, + "f1": 0.690102, + "f1_weighted": 0.7245, + "ap": 0.80844, + "ap_weighted": 0.80844 + }, + { + "accuracy": 0.693133, + "f1": 0.680404, + "f1_weighted": 0.705588, + "ap": 0.819962, + "ap_weighted": 0.819962 + }, + { + "accuracy": 0.706009, + "f1": 0.683993, + "f1_weighted": 0.716927, + "ap": 0.807738, + "ap_weighted": 0.807738 + }, + { + "accuracy": 0.716738, + "f1": 0.70189, + "f1_weighted": 0.72816, + "ap": 0.829804, + "ap_weighted": 0.829804 + }, + { + "accuracy": 0.695279, + "f1": 0.673637, 
+ "f1_weighted": 0.706821, + "ap": 0.802038, + "ap_weighted": 0.802038 + }, + { + "accuracy": 0.60515, + "f1": 0.581851, + "f1_weighted": 0.620824, + "ap": 0.747753, + "ap_weighted": 0.747753 + }, + { + "accuracy": 0.712446, + "f1": 0.688179, + "f1_weighted": 0.722527, + "ap": 0.807575, + "ap_weighted": 0.807575 + }, + { + "accuracy": 0.695279, + "f1": 0.677983, + "f1_weighted": 0.70745, + "ap": 0.810439, + "ap_weighted": 0.810439 + } + ], + "main_score": 0.695279, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.687045, + "f1": 0.665641, + "f1_weighted": 0.698448, + "ap": 0.796931, + "ap_weighted": 0.796931, + "scores_per_experiment": [ + { + "accuracy": 0.684154, + "f1": 0.664326, + "f1_weighted": 0.696295, + "ap": 0.796805, + "ap_weighted": 0.796805 + }, + { + "accuracy": 0.702355, + "f1": 0.668622, + "f1_weighted": 0.710053, + "ap": 0.786676, + "ap_weighted": 0.786676 + }, + { + "accuracy": 0.708779, + "f1": 0.682651, + "f1_weighted": 0.718334, + "ap": 0.800707, + "ap_weighted": 0.800707 + }, + { + "accuracy": 0.667024, + "f1": 0.654031, + "f1_weighted": 0.680303, + "ap": 0.800195, + "ap_weighted": 0.800195 + }, + { + "accuracy": 0.701285, + "f1": 0.681461, + "f1_weighted": 0.7126, + "ap": 0.807184, + "ap_weighted": 0.807184 + }, + { + "accuracy": 0.721627, + "f1": 0.702642, + "f1_weighted": 0.732085, + "ap": 0.821279, + "ap_weighted": 0.821279 + }, + { + "accuracy": 0.638116, + "f1": 0.612073, + "f1_weighted": 0.65146, + "ap": 0.76081, + "ap_weighted": 0.76081 + }, + { + "accuracy": 0.639186, + "f1": 0.618195, + "f1_weighted": 0.653276, + "ap": 0.768243, + "ap_weighted": 0.768243 + }, + { + "accuracy": 0.721627, + "f1": 0.698632, + "f1_weighted": 0.731253, + "ap": 0.813397, + "ap_weighted": 0.813397 + }, + { + "accuracy": 0.686296, + "f1": 0.673772, + "f1_weighted": 0.698819, + "ap": 0.814014, + "ap_weighted": 0.814014 + } + ], + "main_score": 0.687045, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + 
] + }, + "evaluation_time": 24.15538454055786, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonReviewsClassification.json b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonReviewsClassification.json new file mode 100644 index 0000000000..9848038361 --- /dev/null +++ b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/AmazonReviewsClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d", + "task_name": "AmazonReviewsClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.33596, + "f1": 0.33313, + "f1_weighted": 0.33313, + "scores_per_experiment": [ + { + "accuracy": 0.3858, + "f1": 0.384151, + "f1_weighted": 0.384151 + }, + { + "accuracy": 0.32, + "f1": 0.316271, + "f1_weighted": 0.316271 + }, + { + "accuracy": 0.3356, + "f1": 0.328853, + "f1_weighted": 0.328853 + }, + { + "accuracy": 0.3396, + "f1": 0.334589, + "f1_weighted": 0.334589 + }, + { + "accuracy": 0.3334, + "f1": 0.331795, + "f1_weighted": 0.331795 + }, + { + "accuracy": 0.3446, + "f1": 0.343729, + "f1_weighted": 0.343729 + }, + { + "accuracy": 0.2794, + "f1": 0.281941, + "f1_weighted": 0.281941 + }, + { + "accuracy": 0.3552, + "f1": 0.354249, + "f1_weighted": 0.354249 + }, + { + "accuracy": 0.328, + "f1": 0.325996, + "f1_weighted": 0.325996 + }, + { + "accuracy": 0.338, + "f1": 0.329723, + "f1_weighted": 0.329723 + } + ], + "main_score": 0.33596, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.33394, + "f1": 0.331978, + "f1_weighted": 0.331978, + "scores_per_experiment": [ + { + "accuracy": 0.3748, + "f1": 0.374319, + "f1_weighted": 0.374319 + }, + { + "accuracy": 0.325, + "f1": 0.323455, + "f1_weighted": 0.323455 + }, + { + "accuracy": 0.3342, + "f1": 0.329901, + 
"f1_weighted": 0.329901 + }, + { + "accuracy": 0.3386, + "f1": 0.335603, + "f1_weighted": 0.335603 + }, + { + "accuracy": 0.3326, + "f1": 0.332394, + "f1_weighted": 0.332394 + }, + { + "accuracy": 0.3364, + "f1": 0.337795, + "f1_weighted": 0.337795 + }, + { + "accuracy": 0.2918, + "f1": 0.294337, + "f1_weighted": 0.294337 + }, + { + "accuracy": 0.3466, + "f1": 0.34317, + "f1_weighted": 0.34317 + }, + { + "accuracy": 0.3226, + "f1": 0.319688, + "f1_weighted": 0.319688 + }, + { + "accuracy": 0.3368, + "f1": 0.329121, + "f1_weighted": 0.329121 + } + ], + "main_score": 0.33394, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 37.28035569190979, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPDomainClassification.json b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPDomainClassification.json new file mode 100644 index 0000000000..f3a5a74589 --- /dev/null +++ b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPDomainClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf", + "task_name": "MTOPDomainClassification", + "mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.778292, + "f1": 0.765488, + "f1_weighted": 0.779069, + "scores_per_experiment": [ + { + "accuracy": 0.772452, + "f1": 0.760727, + "f1_weighted": 0.770152 + }, + { + "accuracy": 0.780165, + "f1": 0.764418, + "f1_weighted": 0.77965 + }, + { + "accuracy": 0.77741, + "f1": 0.755123, + "f1_weighted": 0.777198 + }, + { + "accuracy": 0.783471, + "f1": 0.772344, + "f1_weighted": 0.783796 + }, + { + "accuracy": 0.76584, + "f1": 0.753206, + "f1_weighted": 0.767035 + }, + { + "accuracy": 0.792287, + "f1": 0.77434, + "f1_weighted": 0.791956 + }, + { + "accuracy": 0.800551, + "f1": 0.786026, + "f1_weighted": 
0.800623 + }, + { + "accuracy": 0.768044, + "f1": 0.761453, + "f1_weighted": 0.772465 + }, + { + "accuracy": 0.774105, + "f1": 0.770192, + "f1_weighted": 0.777779 + }, + { + "accuracy": 0.768595, + "f1": 0.757055, + "f1_weighted": 0.770035 + } + ], + "main_score": 0.778292, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.786081, + "f1": 0.771171, + "f1_weighted": 0.786473, + "scores_per_experiment": [ + { + "accuracy": 0.786137, + "f1": 0.77482, + "f1_weighted": 0.784724 + }, + { + "accuracy": 0.766413, + "f1": 0.747166, + "f1_weighted": 0.76688 + }, + { + "accuracy": 0.782192, + "f1": 0.757721, + "f1_weighted": 0.781977 + }, + { + "accuracy": 0.797971, + "f1": 0.782559, + "f1_weighted": 0.798152 + }, + { + "accuracy": 0.778247, + "f1": 0.764502, + "f1_weighted": 0.778991 + }, + { + "accuracy": 0.801071, + "f1": 0.784254, + "f1_weighted": 0.802288 + }, + { + "accuracy": 0.802761, + "f1": 0.78556, + "f1_weighted": 0.803478 + }, + { + "accuracy": 0.783601, + "f1": 0.77684, + "f1_weighted": 0.786652 + }, + { + "accuracy": 0.786419, + "f1": 0.776707, + "f1_weighted": 0.786679 + }, + { + "accuracy": 0.775993, + "f1": 0.761579, + "f1_weighted": 0.77491 + } + ], + "main_score": 0.786081, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + "evaluation_time": 22.63331937789917, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPIntentClassification.json b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPIntentClassification.json new file mode 100644 index 0000000000..38f185e9f0 --- /dev/null +++ b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/MTOPIntentClassification.json @@ -0,0 +1,137 @@ +{ + "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba", + "task_name": "MTOPIntentClassification", + 
"mteb_version": "1.29.10", + "scores": { + "validation": [ + { + "accuracy": 0.546832, + "f1": 0.356526, + "f1_weighted": 0.593777, + "scores_per_experiment": [ + { + "accuracy": 0.555923, + "f1": 0.369012, + "f1_weighted": 0.599311 + }, + { + "accuracy": 0.551515, + "f1": 0.365264, + "f1_weighted": 0.601706 + }, + { + "accuracy": 0.567493, + "f1": 0.376636, + "f1_weighted": 0.610419 + }, + { + "accuracy": 0.557576, + "f1": 0.352619, + "f1_weighted": 0.607518 + }, + { + "accuracy": 0.536088, + "f1": 0.35328, + "f1_weighted": 0.58046 + }, + { + "accuracy": 0.524518, + "f1": 0.35448, + "f1_weighted": 0.574988 + }, + { + "accuracy": 0.543251, + "f1": 0.34267, + "f1_weighted": 0.593026 + }, + { + "accuracy": 0.552066, + "f1": 0.367481, + "f1_weighted": 0.596148 + }, + { + "accuracy": 0.537741, + "f1": 0.350639, + "f1_weighted": 0.587173 + }, + { + "accuracy": 0.542149, + "f1": 0.333179, + "f1_weighted": 0.587019 + } + ], + "main_score": 0.546832, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ], + "test": [ + { + "accuracy": 0.552578, + "f1": 0.379394, + "f1_weighted": 0.598045, + "scores_per_experiment": [ + { + "accuracy": 0.56523, + "f1": 0.38411, + "f1_weighted": 0.60955 + }, + { + "accuracy": 0.54466, + "f1": 0.379038, + "f1_weighted": 0.593165 + }, + { + "accuracy": 0.560158, + "f1": 0.393551, + "f1_weighted": 0.595091 + }, + { + "accuracy": 0.55255, + "f1": 0.36243, + "f1_weighted": 0.605802 + }, + { + "accuracy": 0.550859, + "f1": 0.383485, + "f1_weighted": 0.595259 + }, + { + "accuracy": 0.522119, + "f1": 0.360199, + "f1_weighted": 0.568394 + }, + { + "accuracy": 0.559031, + "f1": 0.393272, + "f1_weighted": 0.605839 + }, + { + "accuracy": 0.551423, + "f1": 0.381976, + "f1_weighted": 0.59388 + }, + { + "accuracy": 0.560158, + "f1": 0.378374, + "f1_weighted": 0.607985 + }, + { + "accuracy": 0.559594, + "f1": 0.377507, + "f1_weighted": 0.605487 + } + ], + "main_score": 0.552578, + "hf_subset": "de", + "languages": [ + "deu-Latn" + ] + } + ] + }, + 
"evaluation_time": 26.755773067474365, + "kg_co2_emissions": null +} \ No newline at end of file diff --git a/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/model_meta.json b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/model_meta.json index 631baaf3d0..328bb9ac88 100644 --- a/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/model_meta.json +++ b/results/shibing624__text2vec-base-multilingual/6633dc49e554de7105458f8f2e96445c6598e9d1/model_meta.json @@ -1 +1 @@ -{"name": "shibing624/text2vec-base-multilingual", "revision": "6633dc49e554de7105458f8f2e96445c6598e9d1", "release_date": "2023-06-22", "languages": ["zho_Hans", "eng_Latn", "deu_Latn", "fra_Latn", "ita_Latn", "nld_Latn", "por_Latn", "pol_Latn", "rus_Cyrl"], "n_parameters": 117654272, "memory_usage": null, "max_tokens": 512.0, "embed_dim": 384, "license": "apache-2.0", "open_weights": true, "public_training_data": true, "public_training_code": null, "framework": ["PyTorch", "Sentence Transformers"], "reference": "https://huggingface.co/shibing624/text2vec-base-multilingual", "similarity_fn_name": "cosine", "use_instructions": null, "training_datasets": {"shibing624/nli-zh-all": ["train"]}, "adapted_from": "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2", "superseded_by": null, "loader": null} \ No newline at end of file +{"name": "shibing624/text2vec-base-multilingual", "revision": "6633dc49e554de7105458f8f2e96445c6598e9d1", "release_date": "2023-06-22", "languages": ["deu-Latn", "eng-Latn", "spa-Latn", "fra-Latn", "ita-Latn", "nld-Latn", "pol-Latn", "por-Latn", "rus-Cyrl", "zho-Hans"], "n_parameters": 118000000, "max_tokens": 256.0, "embed_dim": 384, "license": "apache-2.0", "open_weights": true, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/shibing624/text2vec-base-multilingual", 
"similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": {}, "adapted_from": "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2", "superseded_by": null, "loader": null} \ No newline at end of file