mteb_dataset_name (string) | eval_language (string) | metric (string) | score (float32) |
---|---|---|---|
"AmazonCounterfactualClassification" | "de" | "accuracy" | 57.098501 |
"AmazonCounterfactualClassification" | "de" | "ap" | 73.38225 |
"AmazonCounterfactualClassification" | "de" | "f1" | 55.183323 |
"AmazonCounterfactualClassification" | "en" | "accuracy" | 65.283582 |
"AmazonCounterfactualClassification" | "en" | "ap" | 28.022479 |
"AmazonCounterfactualClassification" | "en" | "f1" | 59.099774 |
"AmazonCounterfactualClassification" | "en-ext" | "accuracy" | 67.241379 |
"AmazonCounterfactualClassification" | "en-ext" | "ap" | 17.933371 |
"AmazonCounterfactualClassification" | "en-ext" | "f1" | 55.200711 |
"AmazonCounterfactualClassification" | "ja" | "accuracy" | 59.914347 |
"AmazonCounterfactualClassification" | "ja" | "ap" | 13.61092 |
"AmazonCounterfactualClassification" | "ja" | "f1" | 48.704647 |
"AmazonPolarityClassification" | "" | "accuracy" | 62.9849 |
"AmazonPolarityClassification" | "" | "ap" | 58.197015 |
"AmazonPolarityClassification" | "" | "f1" | 62.70402 |
"AmazonReviewsClassification" | "de" | "accuracy" | 25.908 |
"AmazonReviewsClassification" | "de" | "f1" | 25.53815 |
"AmazonReviewsClassification" | "en" | "accuracy" | 30.792 |
"AmazonReviewsClassification" | "en" | "f1" | 30.254565 |
"AmazonReviewsClassification" | "es" | "accuracy" | 27.634 |
"AmazonReviewsClassification" | "es" | "f1" | 27.287076 |
"AmazonReviewsClassification" | "fr" | "accuracy" | 27.54 |
"AmazonReviewsClassification" | "fr" | "f1" | 27.21486 |
"AmazonReviewsClassification" | "ja" | "accuracy" | 23.566 |
"AmazonReviewsClassification" | "ja" | "f1" | 23.349265 |
"AmazonReviewsClassification" | "zh" | "accuracy" | 22.99 |
"AmazonReviewsClassification" | "zh" | "f1" | 22.47175 |
"ArguAna" | "" | "map_at_1" | 23.257 |
"ArguAna" | "" | "map_at_10" | 38.083 |
"ArguAna" | "" | "map_at_100" | 39.264 |
"ArguAna" | "" | "map_at_1000" | 39.273 |
"ArguAna" | "" | "map_at_3" | 32.575 |
"ArguAna" | "" | "map_at_5" | 35.669 |
"ArguAna" | "" | "mrr_at_1" | 23.613 |
"ArguAna" | "" | "mrr_at_10" | 38.243 |
"ArguAna" | "" | "mrr_at_100" | 39.41 |
"ArguAna" | "" | "mrr_at_1000" | 39.419 |
"ArguAna" | "" | "mrr_at_3" | 32.883 |
"ArguAna" | "" | "mrr_at_5" | 35.767 |
"ArguAna" | "" | "ndcg_at_1" | 23.257 |
"ArguAna" | "" | "ndcg_at_10" | 47.128 |
"ArguAna" | "" | "ndcg_at_100" | 52.093 |
"ArguAna" | "" | "ndcg_at_1000" | 52.316 |
"ArguAna" | "" | "ndcg_at_3" | 35.794 |
"ArguAna" | "" | "ndcg_at_5" | 41.364 |
"ArguAna" | "" | "precision_at_1" | 23.257 |
"ArguAna" | "" | "precision_at_10" | 7.632 |
"ArguAna" | "" | "precision_at_100" | 0.979 |
"ArguAna" | "" | "precision_at_1000" | 0.1 |
"ArguAna" | "" | "precision_at_3" | 15.055 |
"ArguAna" | "" | "precision_at_5" | 11.735 |
"ArguAna" | "" | "recall_at_1" | 23.257 |
"ArguAna" | "" | "recall_at_10" | 76.316 |
"ArguAna" | "" | "recall_at_100" | 97.866 |
"ArguAna" | "" | "recall_at_1000" | 99.573 |
"ArguAna" | "" | "recall_at_3" | 45.164 |
"ArguAna" | "" | "recall_at_5" | 58.677 |
"ArxivClusteringP2P" | "" | "v_measure" | 46.069827 |
"ArxivClusteringS2S" | "" | "v_measure" | 37.501829 |
"AskUbuntuDupQuestions" | "" | "map" | 64.061606 |
"AskUbuntuDupQuestions" | "" | "mrr" | 77.400299 |
"BIOSSES" | "" | "cos_sim_pearson" | 86.733005 |
"BIOSSES" | "" | "cos_sim_spearman" | 83.567567 |
"BIOSSES" | "" | "euclidean_pearson" | 84.35154 |
"BIOSSES" | "" | "euclidean_spearman" | 83.567567 |
"BIOSSES" | "" | "manhattan_pearson" | 84.100877 |
"BIOSSES" | "" | "manhattan_spearman" | 83.583832 |
"Banking77Classification" | "" | "accuracy" | 80.402597 |
"Banking77Classification" | "" | "f1" | 79.793267 |
"BiorxivClusteringP2P" | "" | "v_measure" | 36.985834 |
"BiorxivClusteringS2S" | "" | "v_measure" | 33.207831 |
"CQADupstackAndroidRetrieval" | "" | "map_at_1" | 34.975 |
"CQADupstackAndroidRetrieval" | "" | "map_at_10" | 47.228 |
"CQADupstackAndroidRetrieval" | "" | "map_at_100" | 48.91 |
"CQADupstackAndroidRetrieval" | "" | "map_at_1000" | 49.016 |
"CQADupstackAndroidRetrieval" | "" | "map_at_3" | 43.334 |
"CQADupstackAndroidRetrieval" | "" | "map_at_5" | 45.353 |
"CQADupstackAndroidRetrieval" | "" | "mrr_at_1" | 43.348 |
"CQADupstackAndroidRetrieval" | "" | "mrr_at_10" | 53.744 |
"CQADupstackAndroidRetrieval" | "" | "mrr_at_100" | 54.432 |
"CQADupstackAndroidRetrieval" | "" | "mrr_at_1000" | 54.458 |
"CQADupstackAndroidRetrieval" | "" | "mrr_at_3" | 51.359 |
"CQADupstackAndroidRetrieval" | "" | "mrr_at_5" | 52.825 |
"CQADupstackAndroidRetrieval" | "" | "ndcg_at_1" | 43.348 |
"CQADupstackAndroidRetrieval" | "" | "ndcg_at_10" | 54.118 |
"CQADupstackAndroidRetrieval" | "" | "ndcg_at_100" | 59.496 |
"CQADupstackAndroidRetrieval" | "" | "ndcg_at_1000" | 60.847 |
"CQADupstackAndroidRetrieval" | "" | "ndcg_at_3" | 49.001 |
"CQADupstackAndroidRetrieval" | "" | "ndcg_at_5" | 51.245 |
"CQADupstackAndroidRetrieval" | "" | "precision_at_1" | 43.348 |
"CQADupstackAndroidRetrieval" | "" | "precision_at_10" | 10.658 |
"CQADupstackAndroidRetrieval" | "" | "precision_at_100" | 1.701 |
"CQADupstackAndroidRetrieval" | "" | "precision_at_1000" | 0.214 |
"CQADupstackAndroidRetrieval" | "" | "precision_at_3" | 23.701 |
"CQADupstackAndroidRetrieval" | "" | "precision_at_5" | 17.082 |
"CQADupstackAndroidRetrieval" | "" | "recall_at_1" | 34.975 |
"CQADupstackAndroidRetrieval" | "" | "recall_at_10" | 66.291 |
"CQADupstackAndroidRetrieval" | "" | "recall_at_100" | 88.727 |
"CQADupstackAndroidRetrieval" | "" | "recall_at_1000" | 97.267 |
"CQADupstackAndroidRetrieval" | "" | "recall_at_3" | 51.505 |
- Downloads last month: 4,678
- Size of downloaded dataset files: 19 MB
- Size of the auto-converted Parquet files: 514 kB
- Number of rows: 44,709
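
For programmatic access, the sketch below shows one way to query these scores with the Hugging Face `datasets` library. The repo id `org/mteb-results` and the split name `test` are placeholders, since this card does not state them; substitute the actual values for this dataset.

```python
# Minimal sketch: load the results table and filter it by task and metric.
# "org/mteb-results" and split="test" are hypothetical placeholders.
from datasets import load_dataset

ds = load_dataset("org/mteb-results", split="test")

# Keep only the ArguAna nDCG@10 rows from the table above.
arguana = ds.filter(
    lambda row: row["mteb_dataset_name"] == "ArguAna"
    and row["metric"] == "ndcg_at_10"
)

for row in arguana:
    # eval_language is an empty string for monolingual tasks like ArguAna.
    print(row["eval_language"] or "(all)", row["score"])
```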